repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
openjdk/jdk8
35,114
jdk/test/com/sun/jdi/TestScaffold.java
/* * Copyright (c) 2001, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ import com.sun.jdi.*; import com.sun.jdi.request.*; import com.sun.jdi.event.*; import java.util.*; import java.io.*; /** * Framework used by all JDI regression tests */ abstract public class TestScaffold extends TargetAdapter { private boolean shouldTrace = false; private VMConnection connection; private VirtualMachine vm; private EventRequestManager requestManager; private List listeners = Collections.synchronizedList(new LinkedList()); private boolean redefineAtStart = false; private boolean redefineAtEvents = false; private boolean redefineAsynchronously = false; private ReferenceType mainStartClass = null; ThreadReference mainThread; /** * We create a VMDeathRequest, SUSPEND_ALL, to sync the BE and FE. */ private VMDeathRequest ourVMDeathRequest = null; /** * We create an ExceptionRequest, SUSPEND_NONE so that we can * catch it and output a msg if an exception occurs in the * debuggee. 
*/ private ExceptionRequest ourExceptionRequest = null; /** * If we do catch an uncaught exception, we set this true * so the testcase can find out if it wants to. */ private boolean exceptionCaught = false; ThreadReference vmStartThread = null; boolean vmDied = false; boolean vmDisconnected = false; final String[] args; protected boolean testFailed = false; static private class ArgInfo { String targetVMArgs = ""; String targetAppCommandLine = ""; String connectorSpec = "com.sun.jdi.CommandLineLaunch:"; int traceFlags = 0; } /** * An easy way to sleep for awhile */ public void mySleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ee) { } } boolean getExceptionCaught() { return exceptionCaught; } void setExceptionCaught(boolean value) { exceptionCaught = value; } /** * Return true if eventSet contains the VMDeathEvent for the request in * the ourVMDeathRequest ivar. */ private boolean containsOurVMDeathRequest(EventSet eventSet) { if (ourVMDeathRequest != null) { Iterator myIter = eventSet.iterator(); while (myIter.hasNext()) { Event myEvent = (Event)myIter.next(); if (!(myEvent instanceof VMDeathEvent)) { // We assume that an EventSet contains only VMDeathEvents // or no VMDeathEvents. break; } if (ourVMDeathRequest.equals(myEvent.request())) { return true; } } } return false; } /************************************************************************ * The following methods override those in our base class, TargetAdapter. *************************************************************************/ /** * Events handled directly by scaffold always resume (well, almost always) */ public void eventSetComplete(EventSet set) { // The listener in connect(..) resumes after receiving our // special VMDeathEvent. We can't also do the resume // here or we will probably get a VMDisconnectedException if (!containsOurVMDeathRequest(set)) { traceln("TS: set.resume() called"); set.resume(); } } /** * This method sets up default requests. 
* Testcases can override this to change default behavior. */ protected void createDefaultEventRequests() { createDefaultVMDeathRequest(); createDefaultExceptionRequest(); } /** * We want the BE to stop when it issues a VMDeathEvent in order to * give the FE time to complete handling events that occured before * the VMDeath. When we get the VMDeathEvent for this request in * the listener in connect(), we will do a resume. * If a testcase wants to do something special with VMDeathEvent's, * then it should override this method with an empty method or * whatever in order to suppress the automatic resume. The testcase * will then be responsible for the handling of VMDeathEvents. It * has to be sure that it does a resume if it gets a VMDeathEvent * with SUSPEND_ALL, and it has to be sure that it doesn't do a * resume after getting a VMDeath with SUSPEND_NONE (the automatically * generated VMDeathEvent.) */ protected void createDefaultVMDeathRequest() { ourVMDeathRequest = requestManager.createVMDeathRequest(); ourVMDeathRequest.setSuspendPolicy(EventRequest.SUSPEND_ALL); ourVMDeathRequest.enable(); } /** * This will allow us to print a warning if a debuggee gets an * unexpected exception. The unexpected exception will be handled in * the exceptionThrown method in the listener created in the connect() * method. * If a testcase does not want an uncaught exception to cause a * msg, it must override this method. */ protected void createDefaultExceptionRequest() { ourExceptionRequest = requestManager.createExceptionRequest(null, false, true); // We can't afford to make this be other than SUSPEND_NONE. Otherwise, // it would have to be resumed. If our connect() listener resumes it, // what about the case where the EventSet contains other events with // SUSPEND_ALL and there are other listeners who expect the BE to still // be suspended when their handlers get called? 
ourExceptionRequest.setSuspendPolicy(EventRequest.SUSPEND_NONE); ourExceptionRequest.enable(); } private class EventHandler implements Runnable { EventHandler() { Thread thread = new Thread(this); thread.setDaemon(true); thread.start(); } private void notifyEvent(TargetListener listener, Event event) { if (event instanceof BreakpointEvent) { listener.breakpointReached((BreakpointEvent)event); } else if (event instanceof ExceptionEvent) { listener.exceptionThrown((ExceptionEvent)event); } else if (event instanceof StepEvent) { listener.stepCompleted((StepEvent)event); } else if (event instanceof ClassPrepareEvent) { listener.classPrepared((ClassPrepareEvent)event); } else if (event instanceof ClassUnloadEvent) { listener.classUnloaded((ClassUnloadEvent)event); } else if (event instanceof MethodEntryEvent) { listener.methodEntered((MethodEntryEvent)event); } else if (event instanceof MethodExitEvent) { listener.methodExited((MethodExitEvent)event); } else if (event instanceof MonitorContendedEnterEvent) { listener.monitorContendedEnter((MonitorContendedEnterEvent)event); } else if (event instanceof MonitorContendedEnteredEvent) { listener.monitorContendedEntered((MonitorContendedEnteredEvent)event); } else if (event instanceof MonitorWaitEvent) { listener.monitorWait((MonitorWaitEvent)event); } else if (event instanceof MonitorWaitedEvent) { listener.monitorWaited((MonitorWaitedEvent)event); } else if (event instanceof AccessWatchpointEvent) { listener.fieldAccessed((AccessWatchpointEvent)event); } else if (event instanceof ModificationWatchpointEvent) { listener.fieldModified((ModificationWatchpointEvent)event); } else if (event instanceof ThreadStartEvent) { listener.threadStarted((ThreadStartEvent)event); } else if (event instanceof ThreadDeathEvent) { listener.threadDied((ThreadDeathEvent)event); } else if (event instanceof VMStartEvent) { listener.vmStarted((VMStartEvent)event); } else if (event instanceof VMDeathEvent) { listener.vmDied((VMDeathEvent)event); } 
else if (event instanceof VMDisconnectEvent) { listener.vmDisconnected((VMDisconnectEvent)event); } else { throw new InternalError("Unknown event type: " + event.getClass()); } } private void traceSuspendPolicy(int policy) { if (shouldTrace) { switch (policy) { case EventRequest.SUSPEND_NONE: traceln("TS: eventHandler: suspend = SUSPEND_NONE"); break; case EventRequest.SUSPEND_ALL: traceln("TS: eventHandler: suspend = SUSPEND_ALL"); break; case EventRequest.SUSPEND_EVENT_THREAD: traceln("TS: eventHandler: suspend = SUSPEND_EVENT_THREAD"); break; } } } public void run() { boolean connected = true; do { try { EventSet set = vm.eventQueue().remove(); traceSuspendPolicy(set.suspendPolicy()); synchronized (listeners) { ListIterator iter = listeners.listIterator(); while (iter.hasNext()) { TargetListener listener = (TargetListener)iter.next(); traceln("TS: eventHandler: listener = " + listener); listener.eventSetReceived(set); if (listener.shouldRemoveListener()) { iter.remove(); } else { Iterator jter = set.iterator(); while (jter.hasNext()) { Event event = (Event)jter.next(); traceln("TS: eventHandler: event = " + event.getClass()); if (event instanceof VMDisconnectEvent) { connected = false; } listener.eventReceived(event); if (listener.shouldRemoveListener()) { iter.remove(); break; } notifyEvent(listener, event); if (listener.shouldRemoveListener()) { iter.remove(); break; } } traceln("TS: eventHandler: end of events loop"); if (!listener.shouldRemoveListener()) { traceln("TS: eventHandler: calling ESC"); listener.eventSetComplete(set); if (listener.shouldRemoveListener()) { iter.remove(); } } } traceln("TS: eventHandler: end of listeners loop"); } } } catch (InterruptedException e) { traceln("TS: eventHandler: InterruptedException"); } catch (Exception e) { failure("FAILED: Exception occured in eventHandler: " + e); e.printStackTrace(); connected = false; synchronized(TestScaffold.this) { // This will make the waiters such as waitForVMDisconnect // exit their wait 
loops. vmDisconnected = true; TestScaffold.this.notifyAll(); } } traceln("TS: eventHandler: End of outerloop"); } while (connected); traceln("TS: eventHandler: finished"); } } /** * Constructor */ public TestScaffold(String[] args) { this.args = args; } public void enableScaffoldTrace() { this.shouldTrace = true; } public void disableScaffoldTrace() { this.shouldTrace = false; } /** * Helper for the redefine method. Build the map * needed for a redefine. */ protected Map makeRedefineMap(ReferenceType rt) throws Exception { String className = rt.name(); File path = new File(System.getProperty("test.classes", ".")); className = className.replace('.', File.separatorChar); File phyl = new File(path, className + ".class"); byte[] bytes = new byte[(int)phyl.length()]; InputStream in = new FileInputStream(phyl); in.read(bytes); in.close(); Map map = new HashMap(); map.put(rt, bytes); return map; } /** * Redefine a class - HotSwap it */ protected void redefine(ReferenceType rt) { try { println("Redefining " + rt); vm().redefineClasses(makeRedefineMap(rt)); } catch (Exception exc) { failure("FAIL: redefine - unexpected exception: " + exc); } } protected void startUp(String targetName) { List argList = new ArrayList(Arrays.asList(args)); argList.add(targetName); println("run args: " + argList); connect((String[]) argList.toArray(args)); waitForVMStart(); } protected BreakpointEvent startToMain(String targetName) { return startTo(targetName, "main", "([Ljava/lang/String;)V"); } protected BreakpointEvent startTo(String targetName, String methodName, String signature) { startUp(targetName); traceln("TS: back from startUp"); BreakpointEvent bpr = resumeTo(targetName, methodName, signature); Location loc = bpr.location(); mainStartClass = loc.declaringType(); if (redefineAtStart) { redefine(mainStartClass); } if (redefineAsynchronously) { Thread asyncDaemon = new Thread("Async Redefine") { public void run() { try { Map redefMap = makeRedefineMap(mainStartClass); while (true) { 
println("Redefining " + mainStartClass); vm().redefineClasses(redefMap); Thread.sleep(100); } } catch (VMDisconnectedException vmde) { println("async redefine - VM disconnected"); } catch (Exception exc) { failure("FAIL: async redefine - unexpected exception: " + exc); } } }; asyncDaemon.setDaemon(true); asyncDaemon.start(); } if (System.getProperty("jpda.wait") != null) { waitForInput(); } return bpr; } protected void waitForInput() { try { System.err.println("Press <enter> to continue"); System.in.read(); System.err.println("running..."); } catch(Exception e) { } } /* * Test cases should implement tests in runTests and should * initiate testing by calling run(). */ abstract protected void runTests() throws Exception; final public void startTests() throws Exception { try { runTests(); } finally { shutdown(); } } protected void println(String str) { System.err.println(str); } protected void print(String str) { System.err.print(str); } protected void traceln(String str) { if (shouldTrace) { println(str); } } protected void failure(String str) { println(str); testFailed = true; } private ArgInfo parseArgs(String args[]) { ArgInfo argInfo = new ArgInfo(); for (int i = 0; i < args.length; i++) { if (args[i].equals("-connect")) { i++; argInfo.connectorSpec = args[i]; } else if (args[i].equals("-trace")) { i++; argInfo.traceFlags = Integer.decode(args[i]).intValue(); } else if (args[i].equals("-redefstart")) { redefineAtStart = true; } else if (args[i].equals("-redefevent")) { redefineAtEvents = true; } else if (args[i].equals("-redefasync")) { redefineAsynchronously = true; } else if (args[i].startsWith("-J")) { argInfo.targetVMArgs += (args[i].substring(2) + ' '); /* * classpath can span two arguments so we need to handle * it specially. */ if (args[i].equals("-J-classpath")) { i++; argInfo.targetVMArgs += (args[i] + ' '); } } else { argInfo.targetAppCommandLine += (args[i] + ' '); } } return argInfo; } /** * This is called to connect to a debuggee VM. 
It starts the VM and * installs a listener to catch VMStartEvent, our default events, and * VMDisconnectedEvent. When these events appear, that is remembered * and waiters are notified. * This is normally called in the main thread of the test case. * It starts up an EventHandler thread that gets events coming in * from the debuggee and distributes them to listeners. That thread * keeps running until a VMDisconnectedEvent occurs or some exception * occurs during its processing. * * The 'listenUntilVMDisconnect' method adds 'this' as a listener. * This means that 'this's vmDied method will get called. This has a * default impl in TargetAdapter.java which can be overridden in the * testcase. * * waitForRequestedEvent also adds an adaptor listener that listens * for the particular event it is supposed to wait for (and it also * catches VMDisconnectEvents.) This listener is removed once * its eventReceived method is called. * waitForRequestedEvent is called by most of the methods to do bkpts, * etc. */ public void connect(String args[]) { ArgInfo argInfo = parseArgs(args); argInfo.targetVMArgs += VMConnection.getDebuggeeVMOptions(); connection = new VMConnection(argInfo.connectorSpec, argInfo.traceFlags); addListener(new TargetAdapter() { public void eventSetComplete(EventSet set) { if (TestScaffold.this.containsOurVMDeathRequest(set)) { traceln("TS: connect: set.resume() called"); set.resume(); // Note that we want to do the above resume before // waking up any sleepers. 
synchronized(TestScaffold.this) { TestScaffold.this.notifyAll(); } } } public void eventReceived(Event event) { if (redefineAtEvents && event instanceof Locatable) { Location loc = ((Locatable)event).location(); ReferenceType rt = loc.declaringType(); String name = rt.name(); if (name.startsWith("java.") && !name.startsWith("sun.") && !name.startsWith("com.")) { if (mainStartClass != null) { redefine(mainStartClass); } } else { redefine(rt); } } } public void vmStarted(VMStartEvent event) { synchronized(TestScaffold.this) { vmStartThread = event.thread(); TestScaffold.this.notifyAll(); } } /** * By default, we catch uncaught exceptions and print a msg. * The testcase must override the createDefaultExceptionRequest * method if it doesn't want this behavior. */ public void exceptionThrown(ExceptionEvent event) { if (TestScaffold.this.ourExceptionRequest != null && TestScaffold.this.ourExceptionRequest.equals( event.request())) { /* * See * 5038723: com/sun/jdi/sde/TemperatureTableTest.java: * intermittent ObjectCollectedException * Since this request was SUSPEND_NONE, the debuggee * could keep running and the calls below back into * the debuggee might not work. That is why we * have this try/catch. */ try { println("Note: Unexpected Debuggee Exception: " + event.exception().referenceType().name() + " at line " + event.location().lineNumber()); TestScaffold.this.exceptionCaught = true; ObjectReference obj = event.exception(); ReferenceType rtt = obj.referenceType(); Field detail = rtt.fieldByName("detailMessage"); Value val = obj.getValue(detail); println("detailMessage = " + val); /* * This code is commented out because it needs a thread * in which to do the invokeMethod and we don't have * one. 
To enable this code change the request * to be SUSPEND_ALL in createDefaultExceptionRequest, * and then put this line * mainThread = bpe.thread(); * in the testcase after the line * BreakpointEvent bpe = startToMain("...."); */ if (false) { List lll = rtt.methodsByName("printStackTrace"); Method mm = (Method)lll.get(0); obj.invokeMethod(mainThread, mm, new ArrayList(0), 0); } } catch (Exception ee) { println("TestScaffold Exception while handling debuggee Exception: " + ee); } } } public void vmDied(VMDeathEvent event) { vmDied = true; traceln("TS: vmDied called"); } public void vmDisconnected(VMDisconnectEvent event) { synchronized(TestScaffold.this) { vmDisconnected = true; TestScaffold.this.notifyAll(); } } }); if (connection.connector().name().equals("com.sun.jdi.CommandLineLaunch")) { if (argInfo.targetVMArgs.length() > 0) { if (connection.connectorArg("options").length() > 0) { throw new IllegalArgumentException("VM options in two places"); } connection.setConnectorArg("options", argInfo.targetVMArgs); } if (argInfo.targetAppCommandLine.length() > 0) { if (connection.connectorArg("main").length() > 0) { throw new IllegalArgumentException("Command line in two places"); } connection.setConnectorArg("main", argInfo.targetAppCommandLine); } } vm = connection.open(); requestManager = vm.eventRequestManager(); createDefaultEventRequests(); new EventHandler(); } public VirtualMachine vm() { return vm; } public EventRequestManager eventRequestManager() { return requestManager; } public void addListener(TargetListener listener) { traceln("TS: Adding listener " + listener); listeners.add(listener); } public void removeListener(TargetListener listener) { traceln("TS: Removing listener " + listener); listeners.remove(listener); } protected void listenUntilVMDisconnect() { try { addListener (this); } catch (Exception ex){ ex.printStackTrace(); testFailed = true; } finally { // Allow application to complete and shut down resumeToVMDisconnect(); } } public synchronized 
ThreadReference waitForVMStart() { while ((vmStartThread == null) && !vmDisconnected) { try { wait(); } catch (InterruptedException e) { } } if (vmStartThread == null) { throw new VMDisconnectedException(); } return vmStartThread; } public synchronized void waitForVMDisconnect() { traceln("TS: waitForVMDisconnect"); while (!vmDisconnected) { try { wait(); } catch (InterruptedException e) { } } traceln("TS: waitForVMDisconnect: done"); } public Event waitForRequestedEvent(final EventRequest request) { class EventNotification { Event event; boolean disconnected = false; } final EventNotification en = new EventNotification(); TargetAdapter adapter = new TargetAdapter() { public void eventReceived(Event event) { if (request.equals(event.request())) { traceln("TS:Listener2: got requested event"); synchronized (en) { en.event = event; en.notifyAll(); } removeThisListener(); } else if (event instanceof VMDisconnectEvent) { traceln("TS:Listener2: got VMDisconnectEvent"); synchronized (en) { en.disconnected = true; en.notifyAll(); } removeThisListener(); } } }; addListener(adapter); try { synchronized (en) { traceln("TS: waitForRequestedEvent: vm.resume called"); vm.resume(); while (!en.disconnected && (en.event == null)) { en.wait(); } } } catch (InterruptedException e) { return null; } if (en.disconnected) { throw new RuntimeException("VM Disconnected before requested event occurred"); } return en.event; } private StepEvent doStep(ThreadReference thread, int gran, int depth) { final StepRequest sr = requestManager.createStepRequest(thread, gran, depth); sr.addClassExclusionFilter("java.*"); sr.addClassExclusionFilter("javax.*"); sr.addClassExclusionFilter("sun.*"); sr.addClassExclusionFilter("com.sun.*"); sr.addClassExclusionFilter("com.oracle.*"); sr.addClassExclusionFilter("oracle.*"); sr.addClassExclusionFilter("jdk.internal.*"); sr.addCountFilter(1); sr.enable(); StepEvent retEvent = (StepEvent)waitForRequestedEvent(sr); requestManager.deleteEventRequest(sr); return 
retEvent; } public StepEvent stepIntoInstruction(ThreadReference thread) { return doStep(thread, StepRequest.STEP_MIN, StepRequest.STEP_INTO); } public StepEvent stepIntoLine(ThreadReference thread) { return doStep(thread, StepRequest.STEP_LINE, StepRequest.STEP_INTO); } public StepEvent stepOverInstruction(ThreadReference thread) { return doStep(thread, StepRequest.STEP_MIN, StepRequest.STEP_OVER); } public StepEvent stepOverLine(ThreadReference thread) { return doStep(thread, StepRequest.STEP_LINE, StepRequest.STEP_OVER); } public StepEvent stepOut(ThreadReference thread) { return doStep(thread, StepRequest.STEP_LINE, StepRequest.STEP_OUT); } public BreakpointEvent resumeTo(Location loc) { final BreakpointRequest request = requestManager.createBreakpointRequest(loc); request.addCountFilter(1); request.enable(); return (BreakpointEvent)waitForRequestedEvent(request); } public ReferenceType findReferenceType(String name) { List rts = vm.classesByName(name); Iterator iter = rts.iterator(); while (iter.hasNext()) { ReferenceType rt = (ReferenceType)iter.next(); if (rt.name().equals(name)) { return rt; } } return null; } public Method findMethod(ReferenceType rt, String name, String signature) { List methods = rt.methods(); Iterator iter = methods.iterator(); while (iter.hasNext()) { Method method = (Method)iter.next(); if (method.name().equals(name) && method.signature().equals(signature)) { return method; } } return null; } public Location findLocation(ReferenceType rt, int lineNumber) throws AbsentInformationException { List locs = rt.locationsOfLine(lineNumber); if (locs.size() == 0) { throw new IllegalArgumentException("Bad line number"); } else if (locs.size() > 1) { throw new IllegalArgumentException("Line number has multiple locations"); } return (Location)locs.get(0); } public BreakpointEvent resumeTo(String clsName, String methodName, String methodSignature) { ReferenceType rt = findReferenceType(clsName); if (rt == null) { rt = 
resumeToPrepareOf(clsName).referenceType(); } Method method = findMethod(rt, methodName, methodSignature); if (method == null) { throw new IllegalArgumentException("Bad method name/signature: " + clsName + "." + methodName + ":" + methodSignature); } return resumeTo(method.location()); } public BreakpointEvent resumeTo(String clsName, int lineNumber) throws AbsentInformationException { ReferenceType rt = findReferenceType(clsName); if (rt == null) { rt = resumeToPrepareOf(clsName).referenceType(); } return resumeTo(findLocation(rt, lineNumber)); } public ClassPrepareEvent resumeToPrepareOf(String className) { final ClassPrepareRequest request = requestManager.createClassPrepareRequest(); request.addClassFilter(className); request.addCountFilter(1); request.enable(); return (ClassPrepareEvent)waitForRequestedEvent(request); } public void resumeForMsecs(long msecs) { try { addListener (this); } catch (Exception ex){ ex.printStackTrace(); testFailed = true; return; } try { vm().resume(); } catch (VMDisconnectedException e) { } if (!vmDisconnected) { try { System.out.println("Sleeping for " + msecs + " milleseconds"); Thread.sleep(msecs); vm().suspend(); } catch (InterruptedException e) { } } } public void resumeToVMDisconnect() { try { traceln("TS: resumeToVMDisconnect: vm.resume called"); vm.resume(); } catch (VMDisconnectedException e) { // clean up below } waitForVMDisconnect(); } public void shutdown() { shutdown(null); } public void shutdown(String message) { traceln("TS: shutdown: vmDied= " + vmDied + ", vmDisconnected= " + vmDisconnected + ", connection = " + connection); if ((connection != null)) { try { connection.disposeVM(); } catch (VMDisconnectedException e) { // Shutting down after the VM has gone away. This is // not an error, and we just ignore it. } } else { traceln("TS: shutdown: disposeVM not called"); } if (message != null) { println(message); } vmDied = true; vmDisconnected = true; } }
apache/incubator-tuweni
35,059
crypto/src/main/java/org/apache/tuweni/crypto/sodium/PasswordHash.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.apache.tuweni.crypto.sodium; import static java.nio.charset.StandardCharsets.UTF_8; import org.apache.tuweni.bytes.Bytes; import jnr.ffi.Pointer; import org.jetbrains.annotations.Nullable; // Documentation copied under the ISC License, from // https://github.com/jedisct1/libsodium-doc/blob/424b7480562c2e063bc8c52c452ef891621c8480/password_hashing/the_argon2i_function.md /** * The Argon2 memory-hard hashing function. * * <p> * Argon2 summarizes the state of the art in the design of memory-hard functions. * * <p> * It aims at the highest memory filling rate and effective use of multiple computing units, while still providing * defense against tradeoff attacks. * * <p> * It prevents ASICs from having a significant advantage over software implementations. * * <h3>Guidelines for choosing the parameters</h3> * * <p> * Start by determining how much memory the function can use. What will be the highest number of threads/processes * evaluating the function simultaneously (ideally, no more than 1 per CPU core)? How much physical memory is guaranteed * to be available? * * <p> * Set memlimit to the amount of memory you want to reserve for password hashing. 
* * <p> * Then, set opslimit to 3 and measure the time it takes to hash a password. * * <p> * If this it is way too long for your application, reduce memlimit, but keep opslimit set to 3. * * <p> * If the function is so fast that you can afford it to be more computationally intensive without any usability issues, * increase opslimit. * * <p> * For online use (e.g. login in on a website), a 1 second computation is likely to be the acceptable maximum. * * <p> * For interactive use (e.g. a desktop application), a 5 second pause after having entered a password is acceptable if * the password doesn't need to be entered more than once per session. * * <p> * For non-interactive use and infrequent use (e.g. restoring an encrypted backup), an even slower computation can be an * option. * * <p> * This class depends upon the JNR-FFI library being available on the classpath, along with its dependencies. See * https://github.com/jnr/jnr-ffi. JNR-FFI can be included using the gradle dependency 'com.github.jnr:jnr-ffi'. */ public final class PasswordHash { /** * A PasswordHash salt. */ public static final class Salt { final Allocated value; private Salt(Pointer ptr, int length) { this.value = new Allocated(ptr, length); } /** * Create a {@link Salt} from an array of bytes. * * <p> * The byte array must be of length {@link #length()}. * * @param bytes The bytes for the seed. * @return A seed. */ public static Salt fromBytes(Bytes bytes) { return fromBytes(bytes.toArrayUnsafe()); } /** * Create a {@link Salt} from an array of bytes. * * <p> * The byte array must be of length {@link #length()}. * * @param bytes The bytes for the seed. * @return A seed. */ public static Salt fromBytes(byte[] bytes) { if (bytes.length != Sodium.crypto_pwhash_saltbytes()) { throw new IllegalArgumentException( "key must be " + Sodium.crypto_pwhash_saltbytes() + " bytes, got " + bytes.length); } return Sodium.dup(bytes, Salt::new); } /** * Obtain the length of the salt in bytes (32). 
* * @return The length of the salt in bytes (32). */ public static int length() { long saltLength = Sodium.crypto_pwhash_saltbytes(); if (saltLength > Integer.MAX_VALUE) { throw new SodiumException("crypto_pwhash_saltbytes: " + saltLength + " is too large"); } return (int) saltLength; } /** * Generate a new salt using a random generator. * * @return A randomly generated salt. */ public static Salt random() { return Sodium.randomBytes(length(), Salt::new); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Salt)) { return false; } Salt other = (Salt) obj; return other.value.equals(value); } @Override public int hashCode() { return value.hashCode(); } /** * Provides the bytes of this salt. * * @return The bytes of this salt. */ public Bytes bytes() { return value.bytes(); } /** * Provides the bytes of this salt. * * @return The bytes of this salt. */ public byte[] bytesArray() { return value.bytesArray(); } } /** * A PasswordHash algorithm. */ public static final class Algorithm { private static Algorithm ARGON2I13 = new Algorithm("argon2i13", 1, 3, true); private static Algorithm ARGON2ID13 = new Algorithm("argon2id13", 2, 1, Sodium.supportsVersion(Sodium.VERSION_10_0_13)); private final String name; private final int id; private final long minOps; private final boolean supported; private Algorithm(String name, int id, long minOps, boolean supported) { this.name = name; this.id = id; this.minOps = minOps; this.supported = supported; } /** * Returns the currently recommended algorithm * * @return The currently recommended algorithm. */ public static Algorithm recommended() { return ARGON2ID13.isSupported() ? ARGON2ID13 : ARGON2I13; } /** * Returns the version 1.3 of the Argon2i algorithm. * * @return Version 1.3 of the Argon2i algorithm. */ public static Algorithm argon2i13() { return ARGON2I13; } /** * Returns the version 1.3 of the Argon2id algorithm. * * @return Version 1.3 of the Argon2id algorithm. 
*/ public static Algorithm argon2id13() { return ARGON2ID13; } @Nullable static Algorithm fromId(int id) { if (ARGON2ID13.id == id) { return ARGON2ID13; } else if (ARGON2I13.id == id) { return ARGON2I13; } return null; } public String name() { return name; } int id() { return id; } public boolean isSupported() { return supported; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Algorithm)) { return false; } Algorithm other = (Algorithm) obj; return this.id == other.id; } @Override public int hashCode() { return Integer.hashCode(id); } @Override public String toString() { return name; } } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static Bytes hash(String password, int length, Salt salt) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt)); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static Bytes hash(Bytes password, int length, Salt salt) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt)); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. 
*/ public static byte[] hash(byte[] password, int length, Salt salt) { return hash(password, length, salt, moderateOpsLimit(), moderateMemLimit(), Algorithm.recommended()); } /** * Compute a key from a password, using limits on operations and memory that are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hash(String password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hash(Bytes password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for most use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static byte[] hash(byte[] password, int length, Salt salt, Algorithm algorithm) { return hash(password, length, salt, moderateOpsLimit(), moderateMemLimit(), algorithm); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. 
*/ public static Bytes hashInteractive(String password, int length, Salt salt) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, Algorithm.recommended())); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static Bytes hashInteractive(Bytes password, int length, Salt salt) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, Algorithm.recommended())); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static byte[] hashInteractive(byte[] password, int length, Salt salt) { return hash(password, length, salt, interactiveOpsLimit(), interactiveMemLimit(), Algorithm.recommended()); } /** * Compute a key from a password, using limits on operations and memory that are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hashInteractive(String password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. 
*/ public static Bytes hashInteractive(Bytes password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for interactive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static byte[] hashInteractive(byte[] password, int length, Salt salt, Algorithm algorithm) { return hash(password, length, salt, interactiveOpsLimit(), interactiveMemLimit(), algorithm); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static Bytes hashSensitive(String password, int length, Salt salt) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, Algorithm.recommended())); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. */ public static Bytes hashSensitive(Bytes password, int length, Salt salt) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, Algorithm.recommended())); } /** * Compute a key from a password, using the currently recommended algorithm and limits on operations and memory that * are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @return The derived key. 
*/ public static byte[] hashSensitive(byte[] password, int length, Salt salt) { return hash(password, length, salt, sensitiveOpsLimit(), sensitiveMemLimit(), Algorithm.recommended()); } /** * Compute a key from a password, using limits on operations and memory that are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hashSensitive(String password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hashSensitive(Bytes password, int length, Salt salt, Algorithm algorithm) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, algorithm)); } /** * Compute a key from a password, using limits on operations and memory that are suitable for sensitive use-cases. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param algorithm The algorithm to use. * @return The derived key. */ public static byte[] hashSensitive(byte[] password, int length, Salt salt, Algorithm algorithm) { return hash(password, length, salt, sensitiveOpsLimit(), sensitiveMemLimit(), algorithm); } /** * Compute a key from a password. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}. * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}. 
* @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hash(String password, int length, Salt salt, long opsLimit, long memLimit, Algorithm algorithm) { return Bytes.wrap(hash(password.getBytes(UTF_8), length, salt, opsLimit, memLimit, algorithm)); } /** * Compute a key from a password. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}. * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}. * @param algorithm The algorithm to use. * @return The derived key. */ public static Bytes hash(Bytes password, int length, Salt salt, long opsLimit, long memLimit, Algorithm algorithm) { return Bytes.wrap(hash(password.toArrayUnsafe(), length, salt, opsLimit, memLimit, algorithm)); } /** * Compute a key from a password. * * @param password The password to hash. * @param length The key length to generate. * @param salt A salt. * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}. * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}. * @param algorithm The algorithm to use. * @return The derived key. * @throws IllegalArgumentException If the opsLimit is too low for the specified algorithm. * @throws UnsupportedOperationException If the specified algorithm is not supported by the currently loaded sodium * native library. 
 */
  public static byte[] hash(byte[] password, int length, Salt salt, long opsLimit, long memLimit, Algorithm algorithm) {
    assertHashLength(length);
    assertOpsLimit(opsLimit);
    assertMemLimit(memLimit);
    // Each algorithm imposes its own floor on opsLimit (e.g. argon2i13 requires >= 3,
    // argon2id13 requires >= 1); a global assertOpsLimit check is not sufficient.
    if (opsLimit < algorithm.minOps) {
      throw new IllegalArgumentException("opsLimit " + opsLimit + " too low for specified algorithm");
    }
    // argon2id13 is only present in sufficiently recent sodium builds; fail loudly
    // rather than letting the native call misbehave on an unknown algorithm id.
    if (!algorithm.isSupported()) {
      throw new UnsupportedOperationException(
          algorithm.name() + " is not supported by the currently loaded sodium native library");
    }
    byte[] out = new byte[length];
    // Native key derivation; writes exactly `length` bytes into `out` on success (rc == 0).
    int rc = Sodium
        .crypto_pwhash(out, length, password, password.length, salt.value.pointer(), opsLimit, memLimit, algorithm.id);
    if (rc != 0) {
      throw new SodiumException("crypto_pwhash: failed with result " + rc);
    }
    return out;
  }

  /**
   * Returns the minimum hash length
   *
   * @return The minimum hash length (16).
   */
  public static int minHashLength() {
    // When support for 10.0.11 is dropped, remove this
    if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) {
      return 16;
    }
    long len = Sodium.crypto_pwhash_bytes_min();
    if (len > Integer.MAX_VALUE) {
      throw new IllegalStateException("crypto_pwhash_bytes_min: " + len + " is too large");
    }
    return (int) len;
  }

  /**
   * Returns the maximum hash length
   *
   * @return The maximum hash length.
 */
  public static int maxHashLength() {
    // When support for 10.0.11 is dropped, remove this
    if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) {
      return Integer.MAX_VALUE;
    }
    long len = Sodium.crypto_pwhash_bytes_max();
    if (len > Integer.MAX_VALUE) {
      // Clamp rather than fail: callers only need an int-sized upper bound.
      return Integer.MAX_VALUE;
    }
    return (int) len;
  }

  // Validates a requested key length against the bounds supported by the loaded sodium library.
  private static void assertHashLength(int length) {
    // When support for 10.0.11 is dropped, remove this
    if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) {
      if (length < 16) {
        throw new IllegalArgumentException("length out of range");
      }
      return;
    }
    if (length < Sodium.crypto_pwhash_bytes_min() || length > Sodium.crypto_pwhash_bytes_max()) {
      throw new IllegalArgumentException("length out of range");
    }
  }

  /**
   * Compute a hash from a password, using limits on operations and memory that are suitable for most use-cases.
   *
   * <p>
   * Equivalent to {@code hash(password, moderateOpsLimit(), moderateMemLimit())}.
   *
   * @param password The password to hash.
   * @return The hash string.
   */
  public static String hash(String password) {
    return hash(password, moderateOpsLimit(), moderateMemLimit());
  }

  /**
   * Compute a hash from a password, using limits on operations and memory that are suitable for interactive use-cases.
   *
   * <p>
   * Equivalent to {@code hash(password, interactiveOpsLimit(), interactiveMemLimit())}.
   *
   * @param password The password to hash.
   * @return The hash string.
   */
  public static String hashInteractive(String password) {
    return hash(password, interactiveOpsLimit(), interactiveMemLimit());
  }

  /**
   * Compute a hash from a password, using limits on operations and memory that are suitable for sensitive use-cases.
   *
   * <p>
   * Equivalent to {@code hash(password, sensitiveOpsLimit(), sensitiveMemLimit())}.
   *
   * @param password The password to hash.
   * @return The hash string.
   */
  public static String hashSensitive(String password) {
    return hash(password, sensitiveOpsLimit(), sensitiveMemLimit());
  }

  /**
   * Compute a hash from a password.
   *
   * @param password The password to hash.
   * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}.
   * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}.
   * @return The hash string.
   */
  public static String hash(String password, long opsLimit, long memLimit) {
    assertOpsLimit(opsLimit);
    assertMemLimit(memLimit);

    byte[] out = new byte[hashStringLength()];

    byte[] pwBytes = password.getBytes(UTF_8);
    int rc = Sodium.crypto_pwhash_str(out, pwBytes, pwBytes.length, opsLimit, memLimit);
    if (rc != 0) {
      throw new SodiumException("crypto_pwhash_str: failed with result " + rc);
    }

    // The native call produces a NUL-terminated C string inside `out`; trim at the
    // first zero byte so the returned Java string carries no trailing padding.
    int i = 0;
    while (i < out.length && out[i] != 0) {
      ++i;
    }
    return new String(out, 0, i, UTF_8);
  }

  /**
   * Verify a password against a hash.
   *
   * @param hash The hash.
   * @param password The password to verify.
   * @return {@code true} if the password matches the hash.
   */
  public static boolean verify(String hash, String password) {
    byte[] hashBytes = hash.getBytes(UTF_8);
    int hashLength = hashStringLength();
    // A valid hash string must fit in crypto_pwhash_strbytes() *including* the NUL
    // terminator appended below, so a hash of length >= hashLength cannot match.
    if (hashBytes.length >= hashLength) {
      return false;
    }

    Pointer str = Sodium.malloc(hashLength);
    try {
      str.put(0, hashBytes, 0, hashBytes.length);
      // The native API expects a NUL-terminated C string.
      str.putByte(hashBytes.length, (byte) 0);
      byte[] pwBytes = password.getBytes(UTF_8);
      return Sodium.crypto_pwhash_str_verify(str, pwBytes, pwBytes.length) == 0;
    } finally {
      // Always release the natively-allocated buffer.
      Sodium.sodium_free(str);
    }
  }

  // Guards the rehash-checking entry points, which need crypto_pwhash_str_needs_rehash
  // (only present in sodium native library version >= 10.0.14).
  private static void assertCheckRehashAvailable() {
    if (!Sodium.supportsVersion(Sodium.VERSION_10_0_14)) {
      throw new UnsupportedOperationException(
          "Sodium re-hash checking is not available (requires sodium native library version >= 10.0.14)");
    }
  }

  /**
   * A hash verification result.
   *
   * <p>
   * Note: methods returning this result are only supported when the sodium native library version &gt;= 10.0.14 is
   * available.
   */
  public enum VerificationResult {
    /** The hash verification failed. */
    FAILED,
    /** The hash verification passed.
*/ PASSED, /** * The hash verification passed, but the hash is out-of-date and should be regenerated. */ NEEDS_REHASH; /** * Returns true if the verification passed. * * @return {@code true} if the verification passed. */ public boolean passed() { return this != FAILED; } /** * Returns true if the hash should be regenerated. * * @return {@code true} if the hash should be regenerated. */ public boolean needsRehash() { return this == NEEDS_REHASH; } } /** * Verify a password against a hash and check the hash is suitable for normal use-cases. * * <p> * Equivalent to {@code verify(hash, password, moderateOpsLimit(), moderateMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @param password The password to verify. * @return The result of verification. */ public static VerificationResult checkHash(String hash, String password) { return checkHash(hash, password, moderateOpsLimit(), moderateMemLimit()); } /** * Verify a password against a hash and check the hash is suitable for interactive use-cases. * * <p> * Equivalent to {@code verify(hash, password, interactiveOpsLimit(), interactiveMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @param password The password to verify. * @return The result of verification. */ public static VerificationResult checkHashForInteractive(String hash, String password) { return checkHash(hash, password, interactiveOpsLimit(), interactiveMemLimit()); } /** * Verify a password against a hash and check the hash is suitable for sensitive use-cases. * * <p> * Equivalent to {@code verify(hash, password, sensitiveOpsLimit(), sensitiveMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @param password The password to verify. * @return The result of verification. 
*/ public static VerificationResult checkHashForSensitive(String hash, String password) { return checkHash(hash, password, sensitiveOpsLimit(), sensitiveMemLimit()); } /** * Verify a password against a hash. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @param password The password to verify. * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}. * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}. * @return The result of verification. */ public static VerificationResult checkHash(String hash, String password, long opsLimit, long memLimit) { assertCheckRehashAvailable(); assertOpsLimit(opsLimit); assertMemLimit(memLimit); byte[] hashBytes = hash.getBytes(UTF_8); int hashLength = hashStringLength(); if (hashBytes.length >= hashLength) { return VerificationResult.FAILED; } Pointer str = Sodium.malloc(hashLength); try { str.put(0, hashBytes, 0, hashBytes.length); str.putByte(hashBytes.length, (byte) 0); byte[] pwBytes = password.getBytes(UTF_8); if (Sodium.crypto_pwhash_str_verify(str, pwBytes, pwBytes.length) != 0) { return VerificationResult.FAILED; } int rc = Sodium.crypto_pwhash_str_needs_rehash(str, opsLimit, memLimit); if (rc < 0) { throw new SodiumException("crypto_pwhash_str_needs_rehash: failed with result " + rc); } return (rc == 0) ? VerificationResult.PASSED : VerificationResult.NEEDS_REHASH; } finally { Sodium.sodium_free(str); } } /** * Check if a hash needs to be regenerated using limits on operations and memory that are suitable for most use-cases. * * <p> * Equivalent to {@code needsRehash(hash, moderateOpsLimit(), moderateMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @return {@code true} if the hash should be regenerated. 
*/ public static boolean needsRehash(String hash) { return needsRehash(hash, moderateOpsLimit(), moderateMemLimit()); } /** * Check if a hash needs to be regenerated using limits on operations and memory that are suitable for interactive * use-cases. * * <p> * Equivalent to {@code needsRehash(hash, interactiveOpsLimit(), interactiveMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @return {@code true} if the hash should be regenerated. */ public static boolean needsRehashForInteractive(String hash) { return needsRehash(hash, interactiveOpsLimit(), interactiveMemLimit()); } /** * Check if a hash needs to be regenerated using limits on operations and memory that are suitable for sensitive * use-cases. * * <p> * Equivalent to {@code needsRehash(hash, sensitiveOpsLimit(), sensitiveMemLimit())}. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @return {@code true} if the hash should be regenerated. */ public static boolean needsRehashForSensitive(String hash) { return needsRehash(hash, sensitiveOpsLimit(), sensitiveMemLimit()); } /** * Check if a hash needs to be regenerated. * * <p> * Check if a hash matches the parameters opslimit and memlimit, and the current default algorithm. * * <p> * Note: only supported when the sodium native library version &gt;= 10.0.14 is available. * * @param hash The hash. * @param opsLimit The operations limit, which must be in the range {@link #minOpsLimit()} to {@link #maxOpsLimit()}. * @param memLimit The memory limit, which must be in the range {@link #minMemLimit()} to {@link #maxMemLimit()}. * @return {@code true} if the hash should be regenerated. 
*/ public static boolean needsRehash(String hash, long opsLimit, long memLimit) { assertCheckRehashAvailable(); assertOpsLimit(opsLimit); assertMemLimit(memLimit); byte[] hashBytes = hash.getBytes(UTF_8); int hashLength = hashStringLength(); if (hashBytes.length >= hashLength) { throw new IllegalArgumentException("hash is too long"); } Pointer str = Sodium.malloc(hashLength); try { str.put(0, hashBytes, 0, hashBytes.length); str.putByte(hashBytes.length, (byte) 0); int rc = Sodium.crypto_pwhash_str_needs_rehash(str, opsLimit, memLimit); if (rc < 0) { throw new SodiumException("crypto_pwhash_str_needs_rehash: failed with result " + rc); } return (rc != 0); } finally { Sodium.sodium_free(str); } } private static int hashStringLength() { long hashLength = Sodium.crypto_pwhash_strbytes(); if (hashLength > Integer.MAX_VALUE) { throw new IllegalStateException("crypto_pwhash_strbytes: " + hashLength + " is too large"); } return (int) hashLength; } /** * Returns the minimum operations limit * * @return The minimum operations limit (1). */ public static long minOpsLimit() { // When support for 10.0.11 is dropped, remove this if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) { return 3; } return Sodium.crypto_pwhash_opslimit_min(); } /** * Returns an operations limit for interactive use-cases * * @return An operations limit suitable for interactive use-cases (2). */ public static long interactiveOpsLimit() { return Sodium.crypto_pwhash_opslimit_interactive(); } /** * Returns an operations limit for most use-cases * * @return An operations limit suitable for most use-cases (3). */ public static long moderateOpsLimit() { return Sodium.crypto_pwhash_opslimit_moderate(); } /** * Returns an operations limit for sensitive use-cases (4). * * @return An operations limit for sensitive use-cases (4). */ public static long sensitiveOpsLimit() { return Sodium.crypto_pwhash_opslimit_sensitive(); } /** * Returns the maximum operations limit. 
* * @return The maximum operations limit (4294967295). */ public static long maxOpsLimit() { // When support for 10.0.11 is dropped, remove this if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) { return 4294967295L; } return Sodium.crypto_pwhash_opslimit_max(); } private static void assertOpsLimit(long opsLimit) { if (opsLimit < minOpsLimit() || opsLimit > maxOpsLimit()) { throw new IllegalArgumentException("opsLimit out of range"); } } /** * Returns the minimum memory limit. * * @return The minimum memory limit (8192). */ public static long minMemLimit() { // When support for 10.0.11 is dropped, remove this if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) { return 8192; } return Sodium.crypto_pwhash_memlimit_min(); } /** * Returns a memory limit for interactive use-cases. * * @return A memory limit suitable for interactive use-cases (67108864). */ public static long interactiveMemLimit() { return Sodium.crypto_pwhash_memlimit_interactive(); } /** * Returns a memory limit for most use-cases * * @return A memory limit suitable for most use-cases (268435456). */ public static long moderateMemLimit() { return Sodium.crypto_pwhash_memlimit_moderate(); } /** * Returns a memory limit for sensitive use-cases * * @return A memory limit suitable for sensitive use-cases (1073741824). */ public static long sensitiveMemLimit() { return Sodium.crypto_pwhash_memlimit_sensitive(); } /** * Returns the max memory limit. * * @return The maximum memory limit (4398046510080). */ public static long maxMemLimit() { // When support for 10.0.11 is dropped, remove this if (!Sodium.supportsVersion(Sodium.VERSION_10_0_12)) { return 4398046510080L; } return Sodium.crypto_pwhash_memlimit_max(); } private static void assertMemLimit(long memLimit) { if (memLimit < minMemLimit() || memLimit > maxMemLimit()) { throw new IllegalArgumentException("memLimit out of range"); } } }
googleapis/google-cloud-java
35,062
java-eventarc/google-cloud-eventarc/src/test/java/com/google/cloud/eventarc/v1/MockEventarcImpl.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.eventarc.v1; import com.google.api.core.BetaApi; import com.google.cloud.eventarc.v1.EventarcGrpc.EventarcImplBase; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import io.grpc.stub.StreamObserver; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; import javax.annotation.Generated; @BetaApi @Generated("by gapic-generator-java") public class MockEventarcImpl extends EventarcImplBase { private List<AbstractMessage> requests; private Queue<Object> responses; public MockEventarcImpl() { requests = new ArrayList<>(); responses = new LinkedList<>(); } public List<AbstractMessage> getRequests() { return requests; } public void addResponse(AbstractMessage response) { responses.add(response); } public void setResponses(List<AbstractMessage> responses) { this.responses = new LinkedList<Object>(responses); } public void addException(Exception exception) { responses.add(exception); } public void reset() { requests = new ArrayList<>(); responses = new LinkedList<>(); } @Override public void getTrigger(GetTriggerRequest request, StreamObserver<Trigger> responseObserver) { Object response = responses.poll(); if (response instanceof Trigger) { requests.add(request); responseObserver.onNext(((Trigger) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { 
responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetTrigger, expected %s or %s", response == null ? "null" : response.getClass().getName(), Trigger.class.getName(), Exception.class.getName()))); } } @Override public void listTriggers( ListTriggersRequest request, StreamObserver<ListTriggersResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListTriggersResponse) { requests.add(request); responseObserver.onNext(((ListTriggersResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListTriggers, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListTriggersResponse.class.getName(), Exception.class.getName()))); } } @Override public void createTrigger( CreateTriggerRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateTrigger, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateTrigger( UpdateTriggerRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateTrigger, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteTrigger( DeleteTriggerRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteTrigger, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getChannel(GetChannelRequest request, StreamObserver<Channel> responseObserver) { Object response = responses.poll(); if (response instanceof Channel) { requests.add(request); responseObserver.onNext(((Channel) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetChannel, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Channel.class.getName(), Exception.class.getName()))); } } @Override public void listChannels( ListChannelsRequest request, StreamObserver<ListChannelsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListChannelsResponse) { requests.add(request); responseObserver.onNext(((ListChannelsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListChannels, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListChannelsResponse.class.getName(), Exception.class.getName()))); } } @Override public void createChannel( CreateChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateChannel, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateChannel( UpdateChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateChannel, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteChannel( DeleteChannelRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteChannel, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getProvider(GetProviderRequest request, StreamObserver<Provider> responseObserver) { Object response = responses.poll(); if (response instanceof Provider) { requests.add(request); responseObserver.onNext(((Provider) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetProvider, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Provider.class.getName(), Exception.class.getName()))); } } @Override public void listProviders( ListProvidersRequest request, StreamObserver<ListProvidersResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListProvidersResponse) { requests.add(request); responseObserver.onNext(((ListProvidersResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListProviders, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListProvidersResponse.class.getName(), Exception.class.getName()))); } } @Override public void getChannelConnection( GetChannelConnectionRequest request, StreamObserver<ChannelConnection> responseObserver) { Object response = responses.poll(); if (response instanceof ChannelConnection) { requests.add(request); responseObserver.onNext(((ChannelConnection) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetChannelConnection, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ChannelConnection.class.getName(), Exception.class.getName()))); } } @Override public void listChannelConnections( ListChannelConnectionsRequest request, StreamObserver<ListChannelConnectionsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListChannelConnectionsResponse) { requests.add(request); responseObserver.onNext(((ListChannelConnectionsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListChannelConnections, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), ListChannelConnectionsResponse.class.getName(), Exception.class.getName()))); } } @Override public void createChannelConnection( CreateChannelConnectionRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateChannelConnection, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteChannelConnection( DeleteChannelConnectionRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteChannelConnection, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getGoogleChannelConfig( GetGoogleChannelConfigRequest request, StreamObserver<GoogleChannelConfig> responseObserver) { Object response = responses.poll(); if (response instanceof GoogleChannelConfig) { requests.add(request); responseObserver.onNext(((GoogleChannelConfig) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetGoogleChannelConfig, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), GoogleChannelConfig.class.getName(), Exception.class.getName()))); } } @Override public void updateGoogleChannelConfig( UpdateGoogleChannelConfigRequest request, StreamObserver<GoogleChannelConfig> responseObserver) { Object response = responses.poll(); if (response instanceof GoogleChannelConfig) { requests.add(request); responseObserver.onNext(((GoogleChannelConfig) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateGoogleChannelConfig, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), GoogleChannelConfig.class.getName(), Exception.class.getName()))); } } @Override public void getMessageBus( GetMessageBusRequest request, StreamObserver<MessageBus> responseObserver) { Object response = responses.poll(); if (response instanceof MessageBus) { requests.add(request); responseObserver.onNext(((MessageBus) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetMessageBus, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), MessageBus.class.getName(), Exception.class.getName()))); } } @Override public void listMessageBuses( ListMessageBusesRequest request, StreamObserver<ListMessageBusesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListMessageBusesResponse) { requests.add(request); responseObserver.onNext(((ListMessageBusesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListMessageBuses, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListMessageBusesResponse.class.getName(), Exception.class.getName()))); } } @Override public void listMessageBusEnrollments( ListMessageBusEnrollmentsRequest request, StreamObserver<ListMessageBusEnrollmentsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListMessageBusEnrollmentsResponse) { requests.add(request); responseObserver.onNext(((ListMessageBusEnrollmentsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListMessageBusEnrollments, expected %s" + " or %s", response == null ? 
"null" : response.getClass().getName(), ListMessageBusEnrollmentsResponse.class.getName(), Exception.class.getName()))); } } @Override public void createMessageBus( CreateMessageBusRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateMessageBus, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateMessageBus( UpdateMessageBusRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateMessageBus, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteMessageBus( DeleteMessageBusRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteMessageBus, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getEnrollment( GetEnrollmentRequest request, StreamObserver<Enrollment> responseObserver) { Object response = responses.poll(); if (response instanceof Enrollment) { requests.add(request); responseObserver.onNext(((Enrollment) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetEnrollment, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Enrollment.class.getName(), Exception.class.getName()))); } } @Override public void listEnrollments( ListEnrollmentsRequest request, StreamObserver<ListEnrollmentsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListEnrollmentsResponse) { requests.add(request); responseObserver.onNext(((ListEnrollmentsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListEnrollments, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListEnrollmentsResponse.class.getName(), Exception.class.getName()))); } } @Override public void createEnrollment( CreateEnrollmentRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateEnrollment, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateEnrollment( UpdateEnrollmentRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateEnrollment, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteEnrollment( DeleteEnrollmentRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteEnrollment, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getPipeline(GetPipelineRequest request, StreamObserver<Pipeline> responseObserver) { Object response = responses.poll(); if (response instanceof Pipeline) { requests.add(request); responseObserver.onNext(((Pipeline) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetPipeline, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Pipeline.class.getName(), Exception.class.getName()))); } } @Override public void listPipelines( ListPipelinesRequest request, StreamObserver<ListPipelinesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListPipelinesResponse) { requests.add(request); responseObserver.onNext(((ListPipelinesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListPipelines, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListPipelinesResponse.class.getName(), Exception.class.getName()))); } } @Override public void createPipeline( CreatePipelineRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreatePipeline, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updatePipeline( UpdatePipelineRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdatePipeline, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deletePipeline( DeletePipelineRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeletePipeline, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getGoogleApiSource( GetGoogleApiSourceRequest request, StreamObserver<GoogleApiSource> responseObserver) { Object response = responses.poll(); if (response instanceof GoogleApiSource) { requests.add(request); responseObserver.onNext(((GoogleApiSource) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetGoogleApiSource, expected %s or %s", response == null ? "null" : response.getClass().getName(), GoogleApiSource.class.getName(), Exception.class.getName()))); } } @Override public void listGoogleApiSources( ListGoogleApiSourcesRequest request, StreamObserver<ListGoogleApiSourcesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListGoogleApiSourcesResponse) { requests.add(request); responseObserver.onNext(((ListGoogleApiSourcesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListGoogleApiSources, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ListGoogleApiSourcesResponse.class.getName(), Exception.class.getName()))); } } @Override public void createGoogleApiSource( CreateGoogleApiSourceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateGoogleApiSource, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateGoogleApiSource( UpdateGoogleApiSourceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateGoogleApiSource, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteGoogleApiSource( DeleteGoogleApiSourceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteGoogleApiSource, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } }
apache/geode
34,884
geode-core/src/main/java/org/apache/geode/internal/cache/wan/GatewaySenderStats.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.wan; import org.apache.geode.StatisticDescriptor; import org.apache.geode.Statistics; import org.apache.geode.StatisticsFactory; import org.apache.geode.StatisticsType; import org.apache.geode.StatisticsTypeFactory; import org.apache.geode.annotations.Immutable; import org.apache.geode.distributed.internal.DistributionStats; import org.apache.geode.internal.statistics.StatisticsClock; import org.apache.geode.internal.statistics.StatisticsTypeFactoryImpl; public class GatewaySenderStats { public static final String typeName = "GatewaySenderStatistics"; /** The <code>StatisticsType</code> of the statistics */ @Immutable private static final StatisticsType type; //////////////////// Statistic "Id" Fields //////////////////// /** Name of the events received statistic */ protected static final String EVENTS_RECEIVED = "eventsReceived"; /** Name of the events queued statistic */ protected static final String EVENTS_QUEUED = "eventsQueued"; /** Name of the events not queued because conflated statistic */ protected static final String EVENTS_NOT_QUEUED_CONFLATED = "eventsNotQueuedConflated"; /** Name of the events conflated from the batch statistic */ protected static 
final String EVENTS_CONFLATED_FROM_BATCHES = "eventsConflatedFromBatches"; /** Name of the event queue time statistic */ protected static final String EVENT_QUEUE_TIME = "eventQueueTime"; /** Name of the event queue size statistic */ protected static final String EVENT_QUEUE_SIZE = "eventQueueSize"; /** Name of the secondary event queue size statistic */ protected static final String SECONDARY_EVENT_QUEUE_SIZE = "secondaryEventQueueSize"; /** Total number of events processed by queue removal message statistic */ protected static final String EVENTS_PROCESSED_BY_PQRM = "eventsProcessedByPQRM"; /** Name of the event temporary queue size statistic */ protected static final String TMP_EVENT_QUEUE_SIZE = "tempQueueSize"; /** Name of the events distributed statistic */ protected static final String EVENTS_DISTRIBUTED = "eventsDistributed"; /** Name of the events exceeding alert threshold statistic */ protected static final String EVENTS_EXCEEDING_ALERT_THRESHOLD = "eventsExceedingAlertThreshold"; /** Name of the batch distribution time statistic */ protected static final String BATCH_DISTRIBUTION_TIME = "batchDistributionTime"; /** Name of the batches distributed statistic */ protected static final String BATCHES_DISTRIBUTED = "batchesDistributed"; /** Name of the batches redistributed statistic */ protected static final String BATCHES_REDISTRIBUTED = "batchesRedistributed"; /** Name of the batches redistributed statistic */ protected static final String BATCHES_WITH_INCOMPLETE_TRANSACTIONS = "batchesWithIncompleteTransactions"; /** Name of the batches resized statistic */ protected static final String BATCHES_RESIZED = "batchesResized"; /** Name of the unprocessed events added by primary statistic */ protected static final String UNPROCESSED_TOKENS_ADDED_BY_PRIMARY = "unprocessedTokensAddedByPrimary"; /** Name of the unprocessed events added by secondary statistic */ protected static final String UNPROCESSED_EVENTS_ADDED_BY_SECONDARY = 
"unprocessedEventsAddedBySecondary"; /** Name of the unprocessed events removed by primary statistic */ protected static final String UNPROCESSED_EVENTS_REMOVED_BY_PRIMARY = "unprocessedEventsRemovedByPrimary"; /** Name of the unprocessed events removed by secondary statistic */ protected static final String UNPROCESSED_TOKENS_REMOVED_BY_SECONDARY = "unprocessedTokensRemovedBySecondary"; protected static final String UNPROCESSED_EVENTS_REMOVED_BY_TIMEOUT = "unprocessedEventsRemovedByTimeout"; protected static final String UNPROCESSED_TOKENS_REMOVED_BY_TIMEOUT = "unprocessedTokensRemovedByTimeout"; /** Name of the unprocessed events map size statistic */ protected static final String UNPROCESSED_EVENT_MAP_SIZE = "unprocessedEventMapSize"; protected static final String UNPROCESSED_TOKEN_MAP_SIZE = "unprocessedTokenMapSize"; protected static final String CONFLATION_INDEXES_MAP_SIZE = "conflationIndexesSize"; protected static final String EVENTS_FILTERED = "eventsFiltered"; protected static final String NOT_QUEUED_EVENTS = "notQueuedEvent"; protected static final String EVENTS_DROPPED_DUE_TO_PRIMARY_SENDER_NOT_RUNNING = "eventsDroppedDueToPrimarySenderNotRunning"; protected static final String LOAD_BALANCES_COMPLETED = "loadBalancesCompleted"; protected static final String LOAD_BALANCES_IN_PROGRESS = "loadBalancesInProgress"; protected static final String LOAD_BALANCE_TIME = "loadBalanceTime"; protected static final String SYNCHRONIZATION_EVENTS_ENQUEUED = "synchronizationEventsEnqueued"; protected static final String SYNCHRONIZATION_EVENTS_PROVIDED = "synchronizationEventsProvided"; /** Id of the events queued statistic */ private static final int eventsReceivedId; /** Id of the events queued statistic */ private static final int eventsQueuedId; /** Id of the events not queued because conflated statistic */ private static final int eventsNotQueuedConflatedId; /** Id of the event queue time statistic */ private static final int eventQueueTimeId; /** Id of the event 
queue size statistic */ private static final int eventQueueSizeId; /** Id of the event in secondary queue size statistic */ private static final int secondaryEventQueueSizeId; /** Id of the events processed by Parallel Queue Removal Message(PQRM) statistic */ private static final int eventsProcessedByPQRMId; /** Id of the temp event queue size statistic */ private static final int eventTmpQueueSizeId; /** Id of the events distributed statistic */ private static final int eventsDistributedId; /** Id of the events exceeding alert threshold statistic */ private static final int eventsExceedingAlertThresholdId; /** Id of the batch distribution time statistic */ private static final int batchDistributionTimeId; /** Id of the batches distributed statistic */ private static final int batchesDistributedId; /** Id of the batches redistributed statistic */ private static final int batchesRedistributedId; /** Id of the batches with incomplete transactions statistic */ private static final int batchesWithIncompleteTransactionsId; /** Id of the batches resized statistic */ private static final int batchesResizedId; /** Id of the unprocessed events added by primary statistic */ private static final int unprocessedTokensAddedByPrimaryId; /** Id of the unprocessed events added by secondary statistic */ private static final int unprocessedEventsAddedBySecondaryId; /** Id of the unprocessed events removed by primary statistic */ private static final int unprocessedEventsRemovedByPrimaryId; /** Id of the unprocessed events removed by secondary statistic */ private static final int unprocessedTokensRemovedBySecondaryId; private static final int unprocessedEventsRemovedByTimeoutId; private static final int unprocessedTokensRemovedByTimeoutId; /** Id of the unprocessed events map size statistic */ private static final int unprocessedEventMapSizeId; private static final int unprocessedTokenMapSizeId; /** Id of the conflation indexes size statistic */ private static final int 
conflationIndexesMapSizeId; /** Id of filtered events */ private static final int eventsFilteredId; /** Id of not queued events */ private static final int notQueuedEventsId; /** Id of events dropped due to primary sender not running */ private static final int eventsDroppedDueToPrimarySenderNotRunningId; /** Id of events conflated in batch */ private static final int eventsConflatedFromBatchesId; /** Id of load balances completed */ private static final int loadBalancesCompletedId; /** Id of load balances in progress */ private static final int loadBalancesInProgressId; /** Id of load balance time */ private static final int loadBalanceTimeId; /** Id of synchronization events enqueued */ private static final int synchronizationEventsEnqueuedId; /** Id of synchronization events provided */ private static final int synchronizationEventsProvidedId; /* * Static initializer to create and initialize the <code>StatisticsType</code> */ static { StatisticsTypeFactory f = StatisticsTypeFactoryImpl.singleton(); type = createType(f, typeName, "Stats for activity in the GatewaySender"); // Initialize id fields eventsReceivedId = type.nameToId(EVENTS_RECEIVED); eventsQueuedId = type.nameToId(EVENTS_QUEUED); eventsNotQueuedConflatedId = type.nameToId(EVENTS_NOT_QUEUED_CONFLATED); eventQueueTimeId = type.nameToId(EVENT_QUEUE_TIME); eventQueueSizeId = type.nameToId(EVENT_QUEUE_SIZE); secondaryEventQueueSizeId = type.nameToId(SECONDARY_EVENT_QUEUE_SIZE); eventsProcessedByPQRMId = type.nameToId(EVENTS_PROCESSED_BY_PQRM); eventTmpQueueSizeId = type.nameToId(TMP_EVENT_QUEUE_SIZE); eventsDistributedId = type.nameToId(EVENTS_DISTRIBUTED); eventsExceedingAlertThresholdId = type.nameToId(EVENTS_EXCEEDING_ALERT_THRESHOLD); batchDistributionTimeId = type.nameToId(BATCH_DISTRIBUTION_TIME); batchesDistributedId = type.nameToId(BATCHES_DISTRIBUTED); batchesRedistributedId = type.nameToId(BATCHES_REDISTRIBUTED); batchesWithIncompleteTransactionsId = 
type.nameToId(BATCHES_WITH_INCOMPLETE_TRANSACTIONS); batchesResizedId = type.nameToId(BATCHES_RESIZED); unprocessedTokensAddedByPrimaryId = type.nameToId(UNPROCESSED_TOKENS_ADDED_BY_PRIMARY); unprocessedEventsAddedBySecondaryId = type.nameToId(UNPROCESSED_EVENTS_ADDED_BY_SECONDARY); unprocessedEventsRemovedByPrimaryId = type.nameToId(UNPROCESSED_EVENTS_REMOVED_BY_PRIMARY); unprocessedTokensRemovedBySecondaryId = type.nameToId(UNPROCESSED_TOKENS_REMOVED_BY_SECONDARY); unprocessedEventsRemovedByTimeoutId = type.nameToId(UNPROCESSED_EVENTS_REMOVED_BY_TIMEOUT); unprocessedTokensRemovedByTimeoutId = type.nameToId(UNPROCESSED_TOKENS_REMOVED_BY_TIMEOUT); unprocessedEventMapSizeId = type.nameToId(UNPROCESSED_EVENT_MAP_SIZE); unprocessedTokenMapSizeId = type.nameToId(UNPROCESSED_TOKEN_MAP_SIZE); conflationIndexesMapSizeId = type.nameToId(CONFLATION_INDEXES_MAP_SIZE); notQueuedEventsId = type.nameToId(NOT_QUEUED_EVENTS); eventsDroppedDueToPrimarySenderNotRunningId = type.nameToId(EVENTS_DROPPED_DUE_TO_PRIMARY_SENDER_NOT_RUNNING); eventsFilteredId = type.nameToId(EVENTS_FILTERED); eventsConflatedFromBatchesId = type.nameToId(EVENTS_CONFLATED_FROM_BATCHES); loadBalancesCompletedId = type.nameToId(LOAD_BALANCES_COMPLETED); loadBalancesInProgressId = type.nameToId(LOAD_BALANCES_IN_PROGRESS); loadBalanceTimeId = type.nameToId(LOAD_BALANCE_TIME); synchronizationEventsEnqueuedId = type.nameToId(SYNCHRONIZATION_EVENTS_ENQUEUED); synchronizationEventsProvidedId = type.nameToId(SYNCHRONIZATION_EVENTS_PROVIDED); } protected static StatisticsType createType(final StatisticsTypeFactory f, final String typeName, final String description) { return f.createType(typeName, description, new StatisticDescriptor[] { f.createIntCounter(EVENTS_RECEIVED, "Number of events received by this queue.", "operations"), f.createIntCounter(EVENTS_QUEUED, "Number of events added to the event queue.", "operations"), f.createLongCounter(EVENT_QUEUE_TIME, "Total time spent queueing events.", "nanoseconds"), 
f.createIntGauge(EVENT_QUEUE_SIZE, "Size of the event queue.", "operations", false), f.createIntGauge(SECONDARY_EVENT_QUEUE_SIZE, "Size of the secondary event queue.", "operations", false), f.createIntGauge(EVENTS_PROCESSED_BY_PQRM, "Total number of events processed by Parallel Queue Removal Message(PQRM).", "operations", false), f.createIntGauge(TMP_EVENT_QUEUE_SIZE, "Size of the temporary events queue.", "operations", false), f.createIntCounter(EVENTS_NOT_QUEUED_CONFLATED, "Number of events received but not added to the event queue because the queue already contains an event with the event's key.", "operations"), f.createIntCounter(EVENTS_CONFLATED_FROM_BATCHES, "Number of events conflated from batches.", "operations"), f.createIntCounter(EVENTS_DISTRIBUTED, "Number of events removed from the event queue and sent.", "operations"), f.createIntCounter(EVENTS_EXCEEDING_ALERT_THRESHOLD, "Number of events exceeding the alert threshold.", "operations", false), f.createLongCounter(BATCH_DISTRIBUTION_TIME, "Total time spent distributing batches of events to receivers.", "nanoseconds"), f.createIntCounter(BATCHES_DISTRIBUTED, "Number of batches of events removed from the event queue and sent.", "operations"), f.createIntCounter(BATCHES_REDISTRIBUTED, "Number of batches of events removed from the event queue and resent.", "operations", false), f.createLongCounter(BATCHES_WITH_INCOMPLETE_TRANSACTIONS, "Number of batches of events sent with incomplete transactions.", "operations", false), f.createIntCounter(BATCHES_RESIZED, "Number of batches that were resized because they were too large", "operations", false), f.createIntCounter(UNPROCESSED_TOKENS_ADDED_BY_PRIMARY, "Number of tokens added to the secondary's unprocessed token map by the primary (though a listener).", "tokens"), f.createIntCounter(UNPROCESSED_EVENTS_ADDED_BY_SECONDARY, "Number of events added to the secondary's unprocessed event map by the secondary.", "events"), 
f.createIntCounter(UNPROCESSED_EVENTS_REMOVED_BY_PRIMARY, "Number of events removed from the secondary's unprocessed event map by the primary (though a listener).", "events"), f.createIntCounter(UNPROCESSED_TOKENS_REMOVED_BY_SECONDARY, "Number of tokens removed from the secondary's unprocessed token map by the secondary.", "tokens"), f.createIntCounter(UNPROCESSED_EVENTS_REMOVED_BY_TIMEOUT, "Number of events removed from the secondary's unprocessed event map by a timeout.", "events"), f.createIntCounter(UNPROCESSED_TOKENS_REMOVED_BY_TIMEOUT, "Number of tokens removed from the secondary's unprocessed token map by a timeout.", "tokens"), f.createIntGauge(UNPROCESSED_EVENT_MAP_SIZE, "Current number of entries in the secondary's unprocessed event map.", "events", false), f.createIntGauge(UNPROCESSED_TOKEN_MAP_SIZE, "Current number of entries in the secondary's unprocessed token map.", "tokens", false), f.createIntGauge(CONFLATION_INDEXES_MAP_SIZE, "Current number of entries in the conflation indexes map.", "events"), f.createIntCounter(NOT_QUEUED_EVENTS, "Number of events not added to queue.", "events"), f.createIntCounter(EVENTS_DROPPED_DUE_TO_PRIMARY_SENDER_NOT_RUNNING, "Number of events dropped because the primary gateway sender is not running.", "events"), f.createIntCounter(EVENTS_FILTERED, "Number of events filtered through GatewayEventFilter.", "events"), f.createIntCounter(LOAD_BALANCES_COMPLETED, "Number of load balances completed", "operations"), f.createIntGauge(LOAD_BALANCES_IN_PROGRESS, "Number of load balances in progress", "operations"), f.createLongCounter(LOAD_BALANCE_TIME, "Total time spent load balancing this sender", "nanoseconds"), f.createIntCounter(SYNCHRONIZATION_EVENTS_ENQUEUED, "Number of synchronization events added to the event queue.", "operations"), f.createIntCounter(SYNCHRONIZATION_EVENTS_PROVIDED, "Number of synchronization events provided to other members.", "operations"),}); } ////////////////////// Instance Fields 
//////////////////////

  /** The <code>Statistics</code> instance to which most behavior is delegated. */
  private final Statistics stats;

  /** Clock used to time load-balance operations (see {@link #startLoadBalance()}). */
  private final StatisticsClock statisticsClock;

  /////////////////////// Constructors ///////////////////////

  /**
   * Constructor.
   *
   * @param f The <code>StatisticsFactory</code> which creates the <code>Statistics</code> instance
   * @param textIdPrefix Prefix prepended to the gateway sender id to form the statistics text id
   * @param gatewaySenderId The id of the <code>GatewaySender</code> used to generate the name of
   *        the <code>Statistics</code>
   * @param statisticsClock Clock used for timing load-balance statistics
   */
  public GatewaySenderStats(StatisticsFactory f, String textIdPrefix, String gatewaySenderId,
      StatisticsClock statisticsClock) {
    // NOTE(review): "type" is presumably the static StatisticsType initialized earlier in this
    // file (outside this excerpt) — confirm.
    this(f, textIdPrefix + gatewaySenderId, type, statisticsClock);
  }

  /**
   * Constructor.
   *
   * @param f The <code>StatisticsFactory</code> which creates the <code>Statistics</code> instance
   * @param textIdPrefix Prefix prepended to the queue id to form the statistics text id
   * @param asyncQueueId The id of the <code>AsyncEventQueue</code> used to generate the name of the
   *        <code>Statistics</code>
   * @param statType The StatisticsType
   * @param statisticsClock Clock used for timing load-balance statistics
   */
  public GatewaySenderStats(StatisticsFactory f, String textIdPrefix, String asyncQueueId,
      StatisticsType statType, StatisticsClock statisticsClock) {
    this(f, textIdPrefix + asyncQueueId, statType, statisticsClock);
  }

  /**
   * Creates the underlying atomic <code>Statistics</code> instance; all public constructors
   * delegate here.
   */
  private GatewaySenderStats(StatisticsFactory f, String textId, StatisticsType statType,
      StatisticsClock statisticsClock) {
    stats = f.createAtomicStatistics(statType, textId);
    this.statisticsClock = statisticsClock;
  }

  ///////////////////// Instance Methods /////////////////////

  /** Closes the <code>GatewaySenderStats</code>. */
  public void close() {
    stats.close();
  }

  /** @return the current value of the "eventsReceived" stat */
  public int getEventsReceived() {
    return stats.getInt(eventsReceivedId);
  }

  /** Increments the number of events received by 1. */
  public void incEventsReceived() {
    stats.incInt(eventsReceivedId, 1);
  }

  /** @return the current value of the "eventsQueued" stat */
  public int getEventsQueued() {
    return stats.getInt(eventsQueuedId);
  }

  /** @return the current value of the "eventsNotQueuedConflated" stat */
  public int getEventsNotQueuedConflated() {
    return stats.getInt(eventsNotQueuedConflatedId);
  }

  /** @return the current value of the "eventsConflatedFromBatches" stat */
  public int getEventsConflatedFromBatches() {
    return stats.getInt(eventsConflatedFromBatchesId);
  }

  /** @return the current value of the "eventQueueSize" stat */
  public int getEventQueueSize() {
    return stats.getInt(eventQueueSizeId);
  }

  /** @return the current value of the "secondaryEventQueueSize" stat */
  public int getSecondaryEventQueueSize() {
    return stats.getInt(secondaryEventQueueSizeId);
  }

  /** @return the current value of the "eventsProcessedByPQRM" stat */
  public int getEventsProcessedByPQRM() {
    return stats.getInt(eventsProcessedByPQRMId);
  }

  /** @return the current value of the "tempQueueSize" stat */
  public int getTempEventQueueSize() {
    return stats.getInt(eventTmpQueueSizeId);
  }

  /** Returns the internal ID for the {@link #getEventQueueSize()} statistic. */
  public static String getEventQueueSizeId() {
    return EVENT_QUEUE_SIZE;
  }

  /** Returns the internal ID for the {@link #getTempEventQueueSize()} statistic. */
  public static String getEventTempQueueSizeId() {
    return TMP_EVENT_QUEUE_SIZE;
  }

  /** @return the current value of the "eventsDistributed" stat */
  public int getEventsDistributed() {
    return stats.getInt(eventsDistributedId);
  }

  /** @return the current value of the "eventsExceedingAlertThreshold" stat */
  public int getEventsExceedingAlertThreshold() {
    return stats.getInt(eventsExceedingAlertThresholdId);
  }

  /** Increments the value of the "eventsExceedingAlertThreshold" stat by 1. */
  public void incEventsExceedingAlertThreshold() {
    stats.incInt(eventsExceedingAlertThresholdId, 1);
  }

  /** @return the current value of the "batchDistributionTime" stat */
  public long getBatchDistributionTime() {
    return stats.getLong(batchDistributionTimeId);
  }

  /** @return the current value of the "batchesDistributed" stat */
  public int getBatchesDistributed() {
    return stats.getInt(batchesDistributedId);
  }

  /** @return the current value of the "batchesRedistributed" stat */
  public int getBatchesRedistributed() {
    return stats.getInt(batchesRedistributedId);
  }

  /** @return the current value of the "batchesWithIncompleteTransactions" stat */
  public long getBatchesWithIncompleteTransactions() {
    return stats.getLong(batchesWithIncompleteTransactionsId);
  }

  /** @return the current value of the "batchesResized" stat */
  public int getBatchesResized() {
    return stats.getInt(batchesResizedId);
  }

  /** Increments the value of the "batchesRedistributed" stat by 1. */
  public void incBatchesRedistributed() {
    stats.incInt(batchesRedistributedId, 1);
  }

  /** Increments the value of the "batchesWithIncompleteTransactions" stat by 1. */
  public void incBatchesWithIncompleteTransactions() {
    stats.incLong(batchesWithIncompleteTransactionsId, 1);
  }

  /** Increments the value of the "batchesResized" stat by 1. */
  public void incBatchesResized() {
    stats.incInt(batchesResizedId, 1);
  }

  /**
   * Sets the "eventQueueSize" stat.
   *
   * @param size The size of the queue
   */
  public void setQueueSize(int size) {
    stats.setInt(eventQueueSizeId, size);
  }

  /**
   * Sets the "secondaryEventQueueSize" stat.
   *
   * @param size The size of the secondary queue
   */
  public void setSecondaryQueueSize(int size) {
    stats.setInt(secondaryEventQueueSizeId, size);
  }

  /**
   * Sets the "eventsProcessedByPQRM" stat.
   *
   * @param size The total number of the events processed by queue removal message
   */
  public void setEventsProcessedByPQRM(int size) {
    stats.setInt(eventsProcessedByPQRMId, size);
  }

  /**
   * Sets the "tempQueueSize" stat.
   *
   * @param size The size of the temp queue
   */
  public void setTempQueueSize(int size) {
    stats.setInt(eventTmpQueueSizeId, size);
  }

  /** Increments the "eventQueueSize" stat by 1. */
  public void incQueueSize() {
    stats.incInt(eventQueueSizeId, 1);
  }

  /** Increments the "secondaryEventQueueSize" stat by 1. */
  public void incSecondaryQueueSize() {
    stats.incInt(secondaryEventQueueSizeId, 1);
  }

  /** Increments the "tempQueueSize" stat by 1. */
  public void incTempQueueSize() {
    stats.incInt(eventTmpQueueSizeId, 1);
  }

  /**
   * Increments the "eventQueueSize" stat by the given delta.
   *
   * @param delta an integer by which queue size is to be increased
   */
  public void incQueueSize(int delta) {
    stats.incInt(eventQueueSizeId, delta);
  }

  /**
   * Increments the "secondaryEventQueueSize" stat by the given delta.
   *
   * @param delta an integer by which secondary event queue size is to be increased
   */
  public void incSecondaryQueueSize(int delta) {
    stats.incInt(secondaryEventQueueSizeId, delta);
  }

  /**
   * Increments the "eventsProcessedByPQRM" stat by the given delta.
   *
   * @param delta the number of events processed by queue removal message to add
   */
  public void incEventsProcessedByPQRM(int delta) {
    stats.incInt(eventsProcessedByPQRMId, delta);
  }

  /**
   * Increments the "tempQueueSize" stat by the given delta.
   *
   * @param delta an integer by which temp queue size is to be increased
   */
  public void incTempQueueSize(int delta) {
    stats.incInt(eventTmpQueueSizeId, delta);
  }

  /** Decrements the "eventQueueSize" stat by 1. */
  public void decQueueSize() {
    stats.incInt(eventQueueSizeId, -1);
  }

  /** Decrements the "secondaryEventQueueSize" stat by 1. */
  public void decSecondaryQueueSize() {
    stats.incInt(secondaryEventQueueSizeId, -1);
  }

  /** Decrements the "tempQueueSize" stat by 1. */
  public void decTempQueueSize() {
    stats.incInt(eventTmpQueueSizeId, -1);
  }

  /**
   * Decrements the "eventQueueSize" stat by the given delta.
   *
   * @param delta an integer by which queue size is to be decreased
   */
  public void decQueueSize(int delta) {
    stats.incInt(eventQueueSizeId, -delta);
  }

  /**
   * Decrements the "secondaryEventQueueSize" stat by the given delta.
   *
   * @param delta an integer by which secondary queue size is to be decreased
   */
  public void decSecondaryQueueSize(int delta) {
    stats.incInt(secondaryEventQueueSizeId, -delta);
  }

  /**
   * Decrements the "tempQueueSize" stat by the given delta.
   *
   * @param delta an integer by which temp queue size is to be decreased
   */
  public void decTempQueueSize(int delta) {
    stats.incInt(eventTmpQueueSizeId, -delta);
  }

  /** Increments the "eventsNotQueuedConflated" stat. */
  public void incEventsNotQueuedConflated() {
    stats.incInt(eventsNotQueuedConflatedId, 1);
  }

  /**
   * Increments the "eventsConflatedFromBatches" stat.
   *
   * @param numEvents number of conflated events to add
   */
  public void incEventsConflatedFromBatches(int numEvents) {
    stats.incInt(eventsConflatedFromBatchesId, numEvents);
  }

  /** @return the current value of the "unprocessedTokensAddedByPrimary" stat */
  public int getUnprocessedTokensAddedByPrimary() {
    return stats.getInt(unprocessedTokensAddedByPrimaryId);
  }

  /** @return the current value of the "unprocessedEventsAddedBySecondary" stat */
  public int getUnprocessedEventsAddedBySecondary() {
    return stats.getInt(unprocessedEventsAddedBySecondaryId);
  }

  /** @return the current value of the "unprocessedEventsRemovedByPrimary" stat */
  public int getUnprocessedEventsRemovedByPrimary() {
    return stats.getInt(unprocessedEventsRemovedByPrimaryId);
  }

  /** @return the current value of the "unprocessedTokensRemovedBySecondary" stat */
  public int getUnprocessedTokensRemovedBySecondary() {
    return stats.getInt(unprocessedTokensRemovedBySecondaryId);
  }

  /** @return the current value of the "unprocessedEventMapSize" stat */
  public int getUnprocessedEventMapSize() {
    return stats.getInt(unprocessedEventMapSizeId);
  }

  /** @return the current value of the "unprocessedTokenMapSize" stat */
  public int getUnprocessedTokenMapSize() {
    return stats.getInt(unprocessedTokenMapSizeId);
  }

  /** Increments the "notQueuedEvents" stat by 1. */
  public void incEventsNotQueued() {
    stats.incInt(notQueuedEventsId, 1);
  }

  /** @return the current value of the "notQueuedEvents" stat */
  public int getEventsNotQueued() {
    return stats.getInt(notQueuedEventsId);
  }

  /** Increments the "eventsDroppedDueToPrimarySenderNotRunning" stat by 1. */
  public void incEventsDroppedDueToPrimarySenderNotRunning() {
    stats.incInt(eventsDroppedDueToPrimarySenderNotRunningId, 1);
  }

  /** @return the current value of the "eventsDroppedDueToPrimarySenderNotRunning" stat */
  public int getEventsDroppedDueToPrimarySenderNotRunning() {
    return stats.getInt(eventsDroppedDueToPrimarySenderNotRunningId);
  }

  /** Increments the "eventsFiltered" stat by 1. */
  public void incEventsFiltered() {
    stats.incInt(eventsFilteredId, 1);
  }

  /** @return the current value of the "eventsFiltered" stat */
  public int getEventsFiltered() {
    return stats.getInt(eventsFilteredId);
  }

  /**
   * Increments the value of the "unprocessedTokensAddedByPrimary" stat by 1,
   * and grows the tracked token-map size accordingly.
   */
  public void incUnprocessedTokensAddedByPrimary() {
    stats.incInt(unprocessedTokensAddedByPrimaryId, 1);
    incUnprocessedTokenMapSize();
  }

  /**
   * Increments the value of the "unprocessedEventsAddedBySecondary" stat by 1,
   * and grows the tracked event-map size accordingly.
   */
  public void incUnprocessedEventsAddedBySecondary() {
    stats.incInt(unprocessedEventsAddedBySecondaryId, 1);
    incUnprocessedEventMapSize();
  }

  /**
   * Increments the value of the "unprocessedEventsRemovedByPrimary" stat by 1,
   * and shrinks the tracked event-map size accordingly.
   */
  public void incUnprocessedEventsRemovedByPrimary() {
    stats.incInt(unprocessedEventsRemovedByPrimaryId, 1);
    decUnprocessedEventMapSize();
  }

  /**
   * Increments the value of the "unprocessedTokensRemovedBySecondary" stat by 1,
   * and shrinks the tracked token-map size accordingly.
   */
  public void incUnprocessedTokensRemovedBySecondary() {
    stats.incInt(unprocessedTokensRemovedBySecondaryId, 1);
    decUnprocessedTokenMapSize();
  }

  /**
   * Increments the "unprocessedEventsRemovedByTimeout" stat by {@code count},
   * and shrinks the tracked event-map size by the same amount.
   */
  public void incUnprocessedEventsRemovedByTimeout(int count) {
    stats.incInt(unprocessedEventsRemovedByTimeoutId, count);
    decUnprocessedEventMapSize(count);
  }

  /**
   * Increments the "unprocessedTokensRemovedByTimeout" stat by {@code count},
   * and shrinks the tracked token-map size by the same amount.
   */
  public void incUnprocessedTokensRemovedByTimeout(int count) {
    stats.incInt(unprocessedTokensRemovedByTimeoutId, count);
    decUnprocessedTokenMapSize(count);
  }

  /** Resets the "unprocessedEventMapSize" and "unprocessedTokenMapSize" stats to zero. */
  public void clearUnprocessedMaps() {
    stats.setInt(unprocessedEventMapSizeId, 0);
    stats.setInt(unprocessedTokenMapSizeId, 0);
  }

  /** Increments the "unprocessedEventMapSize" stat by 1. */
  private void incUnprocessedEventMapSize() {
    stats.incInt(unprocessedEventMapSizeId, 1);
  }

  /** Decrements the "unprocessedEventMapSize" stat by 1. */
  private void decUnprocessedEventMapSize() {
    stats.incInt(unprocessedEventMapSizeId, -1);
  }

  /** Decrements the "unprocessedEventMapSize" stat by {@code decCount}. */
  private void decUnprocessedEventMapSize(int decCount) {
    stats.incInt(unprocessedEventMapSizeId, -decCount);
  }

  /** Increments the "unprocessedTokenMapSize" stat by 1. */
  private void incUnprocessedTokenMapSize() {
    stats.incInt(unprocessedTokenMapSizeId, 1);
  }

  /** Decrements the "unprocessedTokenMapSize" stat by 1. */
  private void decUnprocessedTokenMapSize() {
    stats.incInt(unprocessedTokenMapSizeId, -1);
  }

  /** Decrements the "unprocessedTokenMapSize" stat by {@code decCount}. */
  private void decUnprocessedTokenMapSize(int decCount) {
    stats.incInt(unprocessedTokenMapSizeId, -decCount);
  }

  /** Increments the value of the "conflationIndexesMapSize" stat by 1. */
  public void incConflationIndexesMapSize() {
    stats.incInt(conflationIndexesMapSizeId, 1);
  }

  /** Decrements the value of the "conflationIndexesMapSize" stat by 1. */
  public void decConflationIndexesMapSize() {
    stats.incInt(conflationIndexesMapSizeId, -1);
  }

  /** @return the value of the "conflationIndexesMapSize" stat */
  public int getConflationIndexesMapSize() {
    return stats.getInt(conflationIndexesMapSizeId);
  }

  /**
   * Returns the current time (ns).
   *
   * @return the current time (ns)
   */
  public long startTime() {
    return DistributionStats.getStatTime();
  }

  /**
   * Increments the "eventsDistributed", "batchesDistributed" and "batchDistributionTime" stats.
   *
   * @param start The start of the batch (which is decremented from the current time to determine
   *        the batch processing time).
   * @param numberOfEvents The number of events to add to the events distributed stat
   */
  public void endBatch(long start, int numberOfEvents) {
    long ts = DistributionStats.getStatTime();
    // Increment number of batches distributed
    stats.incInt(batchesDistributedId, 1);
    // Increment number of events distributed
    stats.incInt(eventsDistributedId, numberOfEvents);
    // Increment batch distribution time
    long elapsed = ts - start;
    stats.incLong(batchDistributionTimeId, elapsed);
  }

  /**
   * Increments the "eventsQueued" and "eventQueueTime" stats.
   *
   * @param start The start of the put (which is decremented from the current time to determine the
   *        queue processing time).
   */
  public void endPut(long start) {
    long ts = DistributionStats.getStatTime();
    // Increment number of events queued
    stats.incInt(eventsQueuedId, 1);
    // Increment event queue time
    long elapsed = ts - start;
    stats.incLong(eventQueueTimeId, elapsed);
  }

  /**
   * Marks a load balance as in progress and returns its start time.
   *
   * @return start time from the statistics clock, to be passed to {@link #endLoadBalance(long)}
   */
  public long startLoadBalance() {
    stats.incInt(loadBalancesInProgressId, 1);
    return statisticsClock.getTime();
  }

  /**
   * Completes a load balance started by {@link #startLoadBalance()}: moves it from
   * in-progress to completed and records its elapsed time.
   *
   * @param start the value returned by {@link #startLoadBalance()}
   */
  public void endLoadBalance(long start) {
    long delta = statisticsClock.getTime() - start;
    stats.incInt(loadBalancesInProgressId, -1);
    stats.incInt(loadBalancesCompletedId, 1);
    stats.incLong(loadBalanceTimeId, delta);
  }

  /** Increments the number of synchronization events enqueued. */
  public void incSynchronizationEventsEnqueued() {
    stats.incInt(synchronizationEventsEnqueuedId, 1);
  }

  /** Increments the number of synchronization events provided. */
  public void incSynchronizationEventsProvided() {
    stats.incInt(synchronizationEventsProvidedId, 1);
  }

  /** @return the underlying <code>Statistics</code> instance */
  public Statistics getStats() {
    return stats;
  }
}
googleapis/google-cloud-java
34,999
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/UpdateBusinessInfoRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1beta/businessinfo.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1beta; /** * * * <pre> * Request message for the `UpdateBusinessInfo` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest} */ public final class UpdateBusinessInfoRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) UpdateBusinessInfoRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateBusinessInfoRequest.newBuilder() to construct. 
private UpdateBusinessInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateBusinessInfoRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateBusinessInfoRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.BusinessInfoProto .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessInfoRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.BusinessInfoProto .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest.class, com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest.Builder.class); } private int bitField0_; public static final int BUSINESS_INFO_FIELD_NUMBER = 1; private com.google.shopping.merchant.accounts.v1beta.BusinessInfo businessInfo_; /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the businessInfo field is set. */ @java.lang.Override public boolean hasBusinessInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The businessInfo. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessInfo getBusinessInfo() { return businessInfo_ == null ? 
com.google.shopping.merchant.accounts.v1beta.BusinessInfo.getDefaultInstance() : businessInfo_; } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.BusinessInfoOrBuilder getBusinessInfoOrBuilder() { return businessInfo_ == null ? com.google.shopping.merchant.accounts.v1beta.BusinessInfo.getDefaultInstance() : businessInfo_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getBusinessInfo()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getBusinessInfo()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest other = (com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) obj; if (hasBusinessInfo() != other.hasBusinessInfo()) return false; if (hasBusinessInfo()) { if (!getBusinessInfo().equals(other.getBusinessInfo())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int 
hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBusinessInfo()) { hash = (37 * hash) + BUSINESS_INFO_FIELD_NUMBER; hash = (53 * hash) + getBusinessInfo().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest prototype) { 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `UpdateBusinessInfo` method. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.BusinessInfoProto .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessInfoRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.BusinessInfoProto .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessInfoRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest.class, com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest.Builder.class); } // Construct using // com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getBusinessInfoFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; businessInfo_ = null; if (businessInfoBuilder_ != null) { businessInfoBuilder_.dispose(); businessInfoBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1beta.BusinessInfoProto .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessInfoRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest build() { com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest buildPartial() { com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest result = new com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.businessInfo_ = businessInfoBuilder_ == null ? 
businessInfo_ : businessInfoBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) { return mergeFrom( (com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest other) { if (other == com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest .getDefaultInstance()) return this; if (other.hasBusinessInfo()) { mergeBusinessInfo(other.getBusinessInfo()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getBusinessInfoFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.shopping.merchant.accounts.v1beta.BusinessInfo businessInfo_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.BusinessInfo, com.google.shopping.merchant.accounts.v1beta.BusinessInfo.Builder, com.google.shopping.merchant.accounts.v1beta.BusinessInfoOrBuilder> businessInfoBuilder_; /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the businessInfo field is set. */ public boolean hasBusinessInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The businessInfo. 
*/ public com.google.shopping.merchant.accounts.v1beta.BusinessInfo getBusinessInfo() { if (businessInfoBuilder_ == null) { return businessInfo_ == null ? com.google.shopping.merchant.accounts.v1beta.BusinessInfo.getDefaultInstance() : businessInfo_; } else { return businessInfoBuilder_.getMessage(); } } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBusinessInfo( com.google.shopping.merchant.accounts.v1beta.BusinessInfo value) { if (businessInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } businessInfo_ = value; } else { businessInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBusinessInfo( com.google.shopping.merchant.accounts.v1beta.BusinessInfo.Builder builderForValue) { if (businessInfoBuilder_ == null) { businessInfo_ = builderForValue.build(); } else { businessInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the business info. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeBusinessInfo( com.google.shopping.merchant.accounts.v1beta.BusinessInfo value) { if (businessInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && businessInfo_ != null && businessInfo_ != com.google.shopping.merchant.accounts.v1beta.BusinessInfo.getDefaultInstance()) { getBusinessInfoBuilder().mergeFrom(value); } else { businessInfo_ = value; } } else { businessInfoBuilder_.mergeFrom(value); } if (businessInfo_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearBusinessInfo() { bitField0_ = (bitField0_ & ~0x00000001); businessInfo_ = null; if (businessInfoBuilder_ != null) { businessInfoBuilder_.dispose(); businessInfoBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1beta.BusinessInfo.Builder getBusinessInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getBusinessInfoFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.accounts.v1beta.BusinessInfoOrBuilder getBusinessInfoOrBuilder() { if (businessInfoBuilder_ != null) { return businessInfoBuilder_.getMessageOrBuilder(); } else { return businessInfo_ == null ? 
com.google.shopping.merchant.accounts.v1beta.BusinessInfo.getDefaultInstance() : businessInfo_; } } /** * * * <pre> * Required. The new version of the business info. * </pre> * * <code> * .google.shopping.merchant.accounts.v1beta.BusinessInfo business_info = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.BusinessInfo, com.google.shopping.merchant.accounts.v1beta.BusinessInfo.Builder, com.google.shopping.merchant.accounts.v1beta.BusinessInfoOrBuilder> getBusinessInfoFieldBuilder() { if (businessInfoBuilder_ == null) { businessInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.accounts.v1beta.BusinessInfo, com.google.shopping.merchant.accounts.v1beta.BusinessInfo.Builder, com.google.shopping.merchant.accounts.v1beta.BusinessInfoOrBuilder>( getBusinessInfo(), getParentForChildren(), isClean()); businessInfo_ = null; } return businessInfoBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. List of fields being updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. List of fields being updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest) private static final com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest(); } public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateBusinessInfoRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateBusinessInfoRequest>() { @java.lang.Override public UpdateBusinessInfoRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateBusinessInfoRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateBusinessInfoRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessInfoRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop-common
34,988
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentSkipListMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueACL; import 
org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueState; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue; import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt.ContainersAndNMTokensAllocation; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.ContainerExpiredSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.server.utils.Lock; import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator; import org.apache.hadoop.yarn.util.resource.ResourceCalculator; import org.apache.hadoop.yarn.util.resource.Resources; import com.google.common.annotations.VisibleForTesting; @LimitedPrivate("yarn") @Evolving @SuppressWarnings("unchecked") 
public class FifoScheduler extends AbstractYarnScheduler<FiCaSchedulerApp, FiCaSchedulerNode> implements Configurable { private static final Log LOG = LogFactory.getLog(FifoScheduler.class); private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); Configuration conf; private boolean usePortForNodeName; private ActiveUsersManager activeUsersManager; private static final String DEFAULT_QUEUE_NAME = "default"; private QueueMetrics metrics; private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator(); private final Queue DEFAULT_QUEUE = new Queue() { @Override public String getQueueName() { return DEFAULT_QUEUE_NAME; } @Override public QueueMetrics getMetrics() { return metrics; } @Override public QueueInfo getQueueInfo( boolean includeChildQueues, boolean recursive) { QueueInfo queueInfo = recordFactory.newRecordInstance(QueueInfo.class); queueInfo.setQueueName(DEFAULT_QUEUE.getQueueName()); queueInfo.setCapacity(1.0f); if (clusterResource.getMemory() == 0) { queueInfo.setCurrentCapacity(0.0f); } else { queueInfo.setCurrentCapacity((float) usedResource.getMemory() / clusterResource.getMemory()); } queueInfo.setMaximumCapacity(1.0f); queueInfo.setChildQueues(new ArrayList<QueueInfo>()); queueInfo.setQueueState(QueueState.RUNNING); return queueInfo; } public Map<QueueACL, AccessControlList> getQueueAcls() { Map<QueueACL, AccessControlList> acls = new HashMap<QueueACL, AccessControlList>(); for (QueueACL acl : QueueACL.values()) { acls.put(acl, new AccessControlList("*")); } return acls; } @Override public List<QueueUserACLInfo> getQueueUserAclInfo( UserGroupInformation unused) { QueueUserACLInfo queueUserAclInfo = recordFactory.newRecordInstance(QueueUserACLInfo.class); queueUserAclInfo.setQueueName(DEFAULT_QUEUE_NAME); queueUserAclInfo.setUserAcls(Arrays.asList(QueueACL.values())); return Collections.singletonList(queueUserAclInfo); } @Override public boolean hasAccess(QueueACL acl, UserGroupInformation 
user) { return getQueueAcls().get(acl).isUserAllowed(user); } @Override public ActiveUsersManager getActiveUsersManager() { return activeUsersManager; } @Override public void recoverContainer(Resource clusterResource, SchedulerApplicationAttempt schedulerAttempt, RMContainer rmContainer) { if (rmContainer.getState().equals(RMContainerState.COMPLETED)) { return; } increaseUsedResources(rmContainer); updateAppHeadRoom(schedulerAttempt); updateAvailableResourcesMetrics(); } }; public FifoScheduler() { super(FifoScheduler.class.getName()); } private synchronized void initScheduler(Configuration conf) { validateConf(conf); //Use ConcurrentSkipListMap because applications need to be ordered this.applications = new ConcurrentSkipListMap<ApplicationId, SchedulerApplication<FiCaSchedulerApp>>(); this.minimumAllocation = Resources.createResource(conf.getInt( YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB)); this.maximumAllocation = Resources.createResource(conf.getInt( YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB)); this.usePortForNodeName = conf.getBoolean( YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, YarnConfiguration.DEFAULT_RM_SCHEDULER_USE_PORT_FOR_NODE_NAME); this.metrics = QueueMetrics.forQueue(DEFAULT_QUEUE_NAME, null, false, conf); this.activeUsersManager = new ActiveUsersManager(metrics); } @Override public void serviceInit(Configuration conf) throws Exception { initScheduler(conf); super.serviceInit(conf); } @Override public void serviceStart() throws Exception { super.serviceStart(); } @Override public void serviceStop() throws Exception { super.serviceStop(); } @Override public synchronized void setConf(Configuration conf) { this.conf = conf; } private void validateConf(Configuration conf) { // validate scheduler memory allocation setting int minMem = conf.getInt( 
YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB); int maxMem = conf.getInt( YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB); if (minMem <= 0 || minMem > maxMem) { throw new YarnRuntimeException("Invalid resource scheduler memory" + " allocation configuration" + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB + "=" + minMem + ", " + YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + "=" + maxMem + ", min and max should be greater than 0" + ", max should be no smaller than min."); } } @Override public synchronized Configuration getConf() { return conf; } @Override public int getNumClusterNodes() { return nodes.size(); } @Override public synchronized void setRMContext(RMContext rmContext) { this.rmContext = rmContext; } @Override public synchronized void reinitialize(Configuration conf, RMContext rmContext) throws IOException { setConf(conf); } @Override public Allocation allocate( ApplicationAttemptId applicationAttemptId, List<ResourceRequest> ask, List<ContainerId> release, List<String> blacklistAdditions, List<String> blacklistRemovals) { FiCaSchedulerApp application = getApplicationAttempt(applicationAttemptId); if (application == null) { LOG.error("Calling allocate on removed " + "or non existant application " + applicationAttemptId); return EMPTY_ALLOCATION; } // Sanity check SchedulerUtils.normalizeRequests(ask, resourceCalculator, clusterResource, minimumAllocation, maximumAllocation); // Release containers releaseContainers(release, application); synchronized (application) { // make sure we aren't stopping/removing the application // when the allocate comes in if (application.isStopped()) { LOG.info("Calling allocate on a stopped " + "application " + applicationAttemptId); return EMPTY_ALLOCATION; } if (!ask.isEmpty()) { LOG.debug("allocate: pre-update" + " applicationId=" + applicationAttemptId + " 
application=" + application); application.showRequests(); // Update application requests application.updateResourceRequests(ask); LOG.debug("allocate: post-update" + " applicationId=" + applicationAttemptId + " application=" + application); application.showRequests(); LOG.debug("allocate:" + " applicationId=" + applicationAttemptId + " #ask=" + ask.size()); } application.updateBlacklist(blacklistAdditions, blacklistRemovals); ContainersAndNMTokensAllocation allocation = application.pullNewlyAllocatedContainersAndNMTokens(); return new Allocation(allocation.getContainerList(), application.getHeadroom(), null, null, null, allocation.getNMTokenList()); } } private FiCaSchedulerNode getNode(NodeId nodeId) { return nodes.get(nodeId); } @VisibleForTesting public synchronized void addApplication(ApplicationId applicationId, String queue, String user, boolean isAppRecovering) { SchedulerApplication<FiCaSchedulerApp> application = new SchedulerApplication<FiCaSchedulerApp>(DEFAULT_QUEUE, user); applications.put(applicationId, application); metrics.submitApp(user); LOG.info("Accepted application " + applicationId + " from user: " + user + ", currently num of applications: " + applications.size()); if (isAppRecovering) { if (LOG.isDebugEnabled()) { LOG.debug(applicationId + " is recovering. 
Skip notifying APP_ACCEPTED"); } } else { rmContext.getDispatcher().getEventHandler() .handle(new RMAppEvent(applicationId, RMAppEventType.APP_ACCEPTED)); } } @VisibleForTesting public synchronized void addApplicationAttempt(ApplicationAttemptId appAttemptId, boolean transferStateFromPreviousAttempt, boolean isAttemptRecovering) { SchedulerApplication<FiCaSchedulerApp> application = applications.get(appAttemptId.getApplicationId()); String user = application.getUser(); // TODO: Fix store FiCaSchedulerApp schedulerApp = new FiCaSchedulerApp(appAttemptId, user, DEFAULT_QUEUE, activeUsersManager, this.rmContext); if (transferStateFromPreviousAttempt) { schedulerApp.transferStateFromPreviousAttempt(application .getCurrentAppAttempt()); } application.setCurrentAppAttempt(schedulerApp); metrics.submitAppAttempt(user); LOG.info("Added Application Attempt " + appAttemptId + " to scheduler from user " + application.getUser()); if (isAttemptRecovering) { if (LOG.isDebugEnabled()) { LOG.debug(appAttemptId + " is recovering. 
Skipping notifying ATTEMPT_ADDED"); } } else { rmContext.getDispatcher().getEventHandler().handle( new RMAppAttemptEvent(appAttemptId, RMAppAttemptEventType.ATTEMPT_ADDED)); } } private synchronized void doneApplication(ApplicationId applicationId, RMAppState finalState) { SchedulerApplication<FiCaSchedulerApp> application = applications.get(applicationId); if (application == null){ LOG.warn("Couldn't find application " + applicationId); return; } // Inform the activeUsersManager activeUsersManager.deactivateApplication(application.getUser(), applicationId); application.stop(finalState); applications.remove(applicationId); } private synchronized void doneApplicationAttempt( ApplicationAttemptId applicationAttemptId, RMAppAttemptState rmAppAttemptFinalState, boolean keepContainers) throws IOException { FiCaSchedulerApp attempt = getApplicationAttempt(applicationAttemptId); SchedulerApplication<FiCaSchedulerApp> application = applications.get(applicationAttemptId.getApplicationId()); if (application == null || attempt == null) { throw new IOException("Unknown application " + applicationAttemptId + " has completed!"); } // Kill all 'live' containers for (RMContainer container : attempt.getLiveContainers()) { if (keepContainers && container.getState().equals(RMContainerState.RUNNING)) { // do not kill the running container in the case of work-preserving AM // restart. LOG.info("Skip killing " + container.getContainerId()); continue; } completedContainer(container, SchedulerUtils.createAbnormalContainerStatus( container.getContainerId(), SchedulerUtils.COMPLETED_APPLICATION), RMContainerEventType.KILL); } // Clean up pending requests, metrics etc. attempt.stop(rmAppAttemptFinalState); } /** * Heart of the scheduler... 
* * @param node node on which resources are available to be allocated */ private void assignContainers(FiCaSchedulerNode node) { LOG.debug("assignContainers:" + " node=" + node.getRMNode().getNodeAddress() + " #applications=" + applications.size()); // Try to assign containers to applications in fifo order for (Map.Entry<ApplicationId, SchedulerApplication<FiCaSchedulerApp>> e : applications .entrySet()) { FiCaSchedulerApp application = e.getValue().getCurrentAppAttempt(); if (application == null) { continue; } LOG.debug("pre-assignContainers"); application.showRequests(); synchronized (application) { // Check if this resource is on the blacklist if (SchedulerAppUtils.isBlacklisted(application, node, LOG)) { continue; } for (Priority priority : application.getPriorities()) { int maxContainers = getMaxAllocatableContainers(application, priority, node, NodeType.OFF_SWITCH); // Ensure the application needs containers of this priority if (maxContainers > 0) { int assignedContainers = assignContainersOnNode(node, application, priority); // Do not assign out of order w.r.t priorities if (assignedContainers == 0) { break; } } } } LOG.debug("post-assignContainers"); application.showRequests(); // Done if (Resources.lessThan(resourceCalculator, clusterResource, node.getAvailableResource(), minimumAllocation)) { break; } } // Update the applications' headroom to correctly take into // account the containers assigned in this update. 
for (SchedulerApplication<FiCaSchedulerApp> application : applications.values()) { FiCaSchedulerApp attempt = (FiCaSchedulerApp) application.getCurrentAppAttempt(); if (attempt == null) { continue; } updateAppHeadRoom(attempt); } } private int getMaxAllocatableContainers(FiCaSchedulerApp application, Priority priority, FiCaSchedulerNode node, NodeType type) { int maxContainers = 0; ResourceRequest offSwitchRequest = application.getResourceRequest(priority, ResourceRequest.ANY); if (offSwitchRequest != null) { maxContainers = offSwitchRequest.getNumContainers(); } if (type == NodeType.OFF_SWITCH) { return maxContainers; } if (type == NodeType.RACK_LOCAL) { ResourceRequest rackLocalRequest = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (rackLocalRequest == null) { return maxContainers; } maxContainers = Math.min(maxContainers, rackLocalRequest.getNumContainers()); } if (type == NodeType.NODE_LOCAL) { ResourceRequest nodeLocalRequest = application.getResourceRequest(priority, node.getRMNode().getNodeAddress()); if (nodeLocalRequest != null) { maxContainers = Math.min(maxContainers, nodeLocalRequest.getNumContainers()); } } return maxContainers; } private int assignContainersOnNode(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority ) { // Data-local int nodeLocalContainers = assignNodeLocalContainers(node, application, priority); // Rack-local int rackLocalContainers = assignRackLocalContainers(node, application, priority); // Off-switch int offSwitchContainers = assignOffSwitchContainers(node, application, priority); LOG.debug("assignContainersOnNode:" + " node=" + node.getRMNode().getNodeAddress() + " application=" + application.getApplicationId().getId() + " priority=" + priority.getPriority() + " #assigned=" + (nodeLocalContainers + rackLocalContainers + offSwitchContainers)); return (nodeLocalContainers + rackLocalContainers + offSwitchContainers); } private int assignNodeLocalContainers(FiCaSchedulerNode node, 
FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, node.getNodeName()); if (request != null) { // Don't allocate on this node if we don't need containers on this rack ResourceRequest rackRequest = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (rackRequest == null || rackRequest.getNumContainers() <= 0) { return 0; } int assignableContainers = Math.min( getMaxAllocatableContainers(application, priority, node, NodeType.NODE_LOCAL), request.getNumContainers()); assignedContainers = assignContainer(node, application, priority, assignableContainers, request, NodeType.NODE_LOCAL); } return assignedContainers; } private int assignRackLocalContainers(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, node.getRMNode().getRackName()); if (request != null) { // Don't allocate on this rack if the application doens't need containers ResourceRequest offSwitchRequest = application.getResourceRequest(priority, ResourceRequest.ANY); if (offSwitchRequest.getNumContainers() <= 0) { return 0; } int assignableContainers = Math.min( getMaxAllocatableContainers(application, priority, node, NodeType.RACK_LOCAL), request.getNumContainers()); assignedContainers = assignContainer(node, application, priority, assignableContainers, request, NodeType.RACK_LOCAL); } return assignedContainers; } private int assignOffSwitchContainers(FiCaSchedulerNode node, FiCaSchedulerApp application, Priority priority) { int assignedContainers = 0; ResourceRequest request = application.getResourceRequest(priority, ResourceRequest.ANY); if (request != null) { assignedContainers = assignContainer(node, application, priority, request.getNumContainers(), request, NodeType.OFF_SWITCH); } return assignedContainers; } private int assignContainer(FiCaSchedulerNode node, 
FiCaSchedulerApp application, Priority priority, int assignableContainers, ResourceRequest request, NodeType type) { LOG.debug("assignContainers:" + " node=" + node.getRMNode().getNodeAddress() + " application=" + application.getApplicationId().getId() + " priority=" + priority.getPriority() + " assignableContainers=" + assignableContainers + " request=" + request + " type=" + type); Resource capability = request.getCapability(); int availableContainers = node.getAvailableResource().getMemory() / capability.getMemory(); // TODO: A buggy // application // with this // zero would // crash the // scheduler. int assignedContainers = Math.min(assignableContainers, availableContainers); if (assignedContainers > 0) { for (int i=0; i < assignedContainers; ++i) { NodeId nodeId = node.getRMNode().getNodeID(); ContainerId containerId = BuilderUtils.newContainerId(application .getApplicationAttemptId(), application.getNewContainerId()); // Create the container Container container = BuilderUtils.newContainer(containerId, nodeId, node.getRMNode() .getHttpAddress(), capability, priority, null); // Allocate! 
// Inform the application RMContainer rmContainer = application.allocate(type, node, priority, request, container); // Inform the node node.allocateContainer(rmContainer); // Update usage for this container increaseUsedResources(rmContainer); } } return assignedContainers; } private synchronized void nodeUpdate(RMNode rmNode) { FiCaSchedulerNode node = getNode(rmNode.getNodeID()); // Update resource if any change SchedulerUtils.updateResourceIfChanged(node, rmNode, clusterResource, LOG); List<UpdatedContainerInfo> containerInfoList = rmNode.pullContainerUpdates(); List<ContainerStatus> newlyLaunchedContainers = new ArrayList<ContainerStatus>(); List<ContainerStatus> completedContainers = new ArrayList<ContainerStatus>(); for(UpdatedContainerInfo containerInfo : containerInfoList) { newlyLaunchedContainers.addAll(containerInfo.getNewlyLaunchedContainers()); completedContainers.addAll(containerInfo.getCompletedContainers()); } // Processing the newly launched containers for (ContainerStatus launchedContainer : newlyLaunchedContainers) { containerLaunchedOnNode(launchedContainer.getContainerId(), node); } // Process completed containers for (ContainerStatus completedContainer : completedContainers) { ContainerId containerId = completedContainer.getContainerId(); LOG.debug("Container FINISHED: " + containerId); completedContainer(getRMContainer(containerId), completedContainer, RMContainerEventType.FINISHED); } if (Resources.greaterThanOrEqual(resourceCalculator, clusterResource, node.getAvailableResource(),minimumAllocation)) { LOG.debug("Node heartbeat " + rmNode.getNodeID() + " available resource = " + node.getAvailableResource()); assignContainers(node); LOG.debug("Node after allocation " + rmNode.getNodeID() + " resource = " + node.getAvailableResource()); } updateAvailableResourcesMetrics(); } private void increaseUsedResources(RMContainer rmContainer) { Resources.addTo(usedResource, rmContainer.getAllocatedResource()); } private void 
updateAppHeadRoom(SchedulerApplicationAttempt schedulerAttempt) { schedulerAttempt.setHeadroom(Resources.subtract(clusterResource, usedResource)); } private void updateAvailableResourcesMetrics() { metrics.setAvailableResourcesToQueue(Resources.subtract(clusterResource, usedResource)); } @Override public void handle(SchedulerEvent event) { switch(event.getType()) { case NODE_ADDED: { NodeAddedSchedulerEvent nodeAddedEvent = (NodeAddedSchedulerEvent)event; addNode(nodeAddedEvent.getAddedRMNode()); recoverContainersOnNode(nodeAddedEvent.getContainerReports(), nodeAddedEvent.getAddedRMNode()); } break; case NODE_REMOVED: { NodeRemovedSchedulerEvent nodeRemovedEvent = (NodeRemovedSchedulerEvent)event; removeNode(nodeRemovedEvent.getRemovedRMNode()); } break; case NODE_UPDATE: { NodeUpdateSchedulerEvent nodeUpdatedEvent = (NodeUpdateSchedulerEvent)event; nodeUpdate(nodeUpdatedEvent.getRMNode()); } break; case APP_ADDED: { AppAddedSchedulerEvent appAddedEvent = (AppAddedSchedulerEvent) event; addApplication(appAddedEvent.getApplicationId(), appAddedEvent.getQueue(), appAddedEvent.getUser(), appAddedEvent.getIsAppRecovering()); } break; case APP_REMOVED: { AppRemovedSchedulerEvent appRemovedEvent = (AppRemovedSchedulerEvent)event; doneApplication(appRemovedEvent.getApplicationID(), appRemovedEvent.getFinalState()); } break; case APP_ATTEMPT_ADDED: { AppAttemptAddedSchedulerEvent appAttemptAddedEvent = (AppAttemptAddedSchedulerEvent) event; addApplicationAttempt(appAttemptAddedEvent.getApplicationAttemptId(), appAttemptAddedEvent.getTransferStateFromPreviousAttempt(), appAttemptAddedEvent.getIsAttemptRecovering()); } break; case APP_ATTEMPT_REMOVED: { AppAttemptRemovedSchedulerEvent appAttemptRemovedEvent = (AppAttemptRemovedSchedulerEvent) event; try { doneApplicationAttempt( appAttemptRemovedEvent.getApplicationAttemptID(), appAttemptRemovedEvent.getFinalAttemptState(), appAttemptRemovedEvent.getKeepContainersAcrossAppAttempts()); } catch(IOException ie) { 
LOG.error("Unable to remove application " + appAttemptRemovedEvent.getApplicationAttemptID(), ie); } } break; case CONTAINER_EXPIRED: { ContainerExpiredSchedulerEvent containerExpiredEvent = (ContainerExpiredSchedulerEvent) event; ContainerId containerid = containerExpiredEvent.getContainerId(); completedContainer(getRMContainer(containerid), SchedulerUtils.createAbnormalContainerStatus( containerid, SchedulerUtils.EXPIRED_CONTAINER), RMContainerEventType.EXPIRE); } break; default: LOG.error("Invalid eventtype " + event.getType() + ". Ignoring!"); } } @Lock(FifoScheduler.class) @Override protected synchronized void completedContainer(RMContainer rmContainer, ContainerStatus containerStatus, RMContainerEventType event) { if (rmContainer == null) { LOG.info("Null container completed..."); return; } // Get the application for the finished container Container container = rmContainer.getContainer(); FiCaSchedulerApp application = getCurrentAttemptForContainer(container.getId()); ApplicationId appId = container.getId().getApplicationAttemptId().getApplicationId(); // Get the node on which the container was allocated FiCaSchedulerNode node = getNode(container.getNodeId()); if (application == null) { LOG.info("Unknown application: " + appId + " released container " + container.getId() + " on node: " + node + " with event: " + event); return; } // Inform the application application.containerCompleted(rmContainer, containerStatus, event); // Inform the node node.releaseContainer(container); // Update total usage Resources.subtractFrom(usedResource, container.getResource()); LOG.info("Application attempt " + application.getApplicationAttemptId() + " released container " + container.getId() + " on node: " + node + " with event: " + event); } private Resource usedResource = recordFactory.newRecordInstance(Resource.class); private synchronized void removeNode(RMNode nodeInfo) { FiCaSchedulerNode node = getNode(nodeInfo.getNodeID()); if (node == null) { return; } // Kill running 
containers for(RMContainer container : node.getRunningContainers()) { completedContainer(container, SchedulerUtils.createAbnormalContainerStatus( container.getContainerId(), SchedulerUtils.LOST_CONTAINER), RMContainerEventType.KILL); } //Remove the node this.nodes.remove(nodeInfo.getNodeID()); // Update cluster metrics Resources.subtractFrom(clusterResource, node.getRMNode().getTotalCapability()); } @Override public QueueInfo getQueueInfo(String queueName, boolean includeChildQueues, boolean recursive) { return DEFAULT_QUEUE.getQueueInfo(false, false); } @Override public List<QueueUserACLInfo> getQueueUserAclInfo() { return DEFAULT_QUEUE.getQueueUserAclInfo(null); } private synchronized void addNode(RMNode nodeManager) { this.nodes.put(nodeManager.getNodeID(), new FiCaSchedulerNode(nodeManager, usePortForNodeName)); Resources.addTo(clusterResource, nodeManager.getTotalCapability()); } @Override public void recover(RMState state) { // NOT IMPLEMENTED } @Override public RMContainer getRMContainer(ContainerId containerId) { FiCaSchedulerApp attempt = getCurrentAttemptForContainer(containerId); return (attempt == null) ? null : attempt.getRMContainer(containerId); } @Override public QueueMetrics getRootQueueMetrics() { return DEFAULT_QUEUE.getMetrics(); } @Override public synchronized boolean checkAccess(UserGroupInformation callerUGI, QueueACL acl, String queueName) { return DEFAULT_QUEUE.hasAccess(acl, callerUGI); } @Override public synchronized List<ApplicationAttemptId> getAppsInQueue(String queueName) { if (queueName.equals(DEFAULT_QUEUE.getQueueName())) { List<ApplicationAttemptId> attempts = new ArrayList<ApplicationAttemptId>(applications.size()); for (SchedulerApplication<FiCaSchedulerApp> app : applications.values()) { attempts.add(app.getCurrentAppAttempt().getApplicationAttemptId()); } return attempts; } else { return null; } } public Resource getUsedResource() { return usedResource; } }
openjdk/jdk8
35,215
jdk/src/share/classes/java/util/concurrent/ConcurrentLinkedQueue.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file: * * Written by Doug Lea and Martin Buchholz with assistance from members of * JCP JSR-166 Expert Group and released to the public domain, as explained * at http://creativecommons.org/publicdomain/zero/1.0/ */ package java.util.concurrent; import java.util.AbstractQueue; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Queue; import java.util.Spliterator; import java.util.Spliterators; import java.util.function.Consumer; /** * An unbounded thread-safe {@linkplain Queue queue} based on linked nodes. * This queue orders elements FIFO (first-in-first-out). 
* The <em>head</em> of the queue is that element that has been on the * queue the longest time. * The <em>tail</em> of the queue is that element that has been on the * queue the shortest time. New elements * are inserted at the tail of the queue, and the queue retrieval * operations obtain elements at the head of the queue. * A {@code ConcurrentLinkedQueue} is an appropriate choice when * many threads will share access to a common collection. * Like most other concurrent collection implementations, this class * does not permit the use of {@code null} elements. * * <p>This implementation employs an efficient <em>non-blocking</em> * algorithm based on one described in <a * href="http://www.cs.rochester.edu/u/michael/PODC96.html"> Simple, * Fast, and Practical Non-Blocking and Blocking Concurrent Queue * Algorithms</a> by Maged M. Michael and Michael L. Scott. * * <p>Iterators are <i>weakly consistent</i>, returning elements * reflecting the state of the queue at some point at or since the * creation of the iterator. They do <em>not</em> throw {@link * java.util.ConcurrentModificationException}, and may proceed concurrently * with other operations. Elements contained in the queue since the creation * of the iterator will be returned exactly once. * * <p>Beware that, unlike in most collections, the {@code size} method * is <em>NOT</em> a constant-time operation. Because of the * asynchronous nature of these queues, determining the current number * of elements requires a traversal of the elements, and so may report * inaccurate results if this collection is modified during traversal. * Additionally, the bulk operations {@code addAll}, * {@code removeAll}, {@code retainAll}, {@code containsAll}, * {@code equals}, and {@code toArray} are <em>not</em> guaranteed * to be performed atomically. For example, an iterator operating * concurrently with an {@code addAll} operation might view only some * of the added elements. 
* * <p>This class and its iterator implement all of the <em>optional</em> * methods of the {@link Queue} and {@link Iterator} interfaces. * * <p>Memory consistency effects: As with other concurrent * collections, actions in a thread prior to placing an object into a * {@code ConcurrentLinkedQueue} * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a> * actions subsequent to the access or removal of that element from * the {@code ConcurrentLinkedQueue} in another thread. * * <p>This class is a member of the * <a href="{@docRoot}/../technotes/guides/collections/index.html"> * Java Collections Framework</a>. * * @since 1.5 * @author Doug Lea * @param <E> the type of elements held in this collection */ public class ConcurrentLinkedQueue<E> extends AbstractQueue<E> implements Queue<E>, java.io.Serializable { private static final long serialVersionUID = 196745693267521676L; /* * This is a modification of the Michael & Scott algorithm, * adapted for a garbage-collected environment, with support for * interior node deletion (to support remove(Object)). For * explanation, read the paper. * * Note that like most non-blocking algorithms in this package, * this implementation relies on the fact that in garbage * collected systems, there is no possibility of ABA problems due * to recycled nodes, so there is no need to use "counted * pointers" or related techniques seen in versions used in * non-GC'ed settings. * * The fundamental invariants are: * - There is exactly one (last) Node with a null next reference, * which is CASed when enqueueing. This last Node can be * reached in O(1) time from tail, but tail is merely an * optimization - it can always be reached in O(N) time from * head as well. * - The elements contained in the queue are the non-null items in * Nodes that are reachable from head. CASing the item * reference of a Node to null atomically removes it from the * queue. 
Reachability of all elements from head must remain * true even in the case of concurrent modifications that cause * head to advance. A dequeued Node may remain in use * indefinitely due to creation of an Iterator or simply a * poll() that has lost its time slice. * * The above might appear to imply that all Nodes are GC-reachable * from a predecessor dequeued Node. That would cause two problems: * - allow a rogue Iterator to cause unbounded memory retention * - cause cross-generational linking of old Nodes to new Nodes if * a Node was tenured while live, which generational GCs have a * hard time dealing with, causing repeated major collections. * However, only non-deleted Nodes need to be reachable from * dequeued Nodes, and reachability does not necessarily have to * be of the kind understood by the GC. We use the trick of * linking a Node that has just been dequeued to itself. Such a * self-link implicitly means to advance to head. * * Both head and tail are permitted to lag. In fact, failing to * update them every time one could is a significant optimization * (fewer CASes). As with LinkedTransferQueue (see the internal * documentation for that class), we use a slack threshold of two; * that is, we update head/tail when the current pointer appears * to be two or more steps away from the first/last node. * * Since head and tail are updated concurrently and independently, * it is possible for tail to lag behind head (why not)? * * CASing a Node's item reference to null atomically removes the * element from the queue. Iterators skip over Nodes with null * items. Prior implementations of this class had a race between * poll() and remove(Object) where the same element would appear * to be successfully removed by two concurrent operations. The * method remove(Object) also lazily unlinks deleted Nodes, but * this is merely an optimization. 
* * When constructing a Node (before enqueuing it) we avoid paying * for a volatile write to item by using Unsafe.putObject instead * of a normal write. This allows the cost of enqueue to be * "one-and-a-half" CASes. * * Both head and tail may or may not point to a Node with a * non-null item. If the queue is empty, all items must of course * be null. Upon creation, both head and tail refer to a dummy * Node with null item. Both head and tail are only updated using * CAS, so they never regress, although again this is merely an * optimization. */ private static class Node<E> { volatile E item; volatile Node<E> next; /** * Constructs a new node. Uses relaxed write because item can * only be seen after publication via casNext. */ Node(E item) { UNSAFE.putObject(this, itemOffset, item); } boolean casItem(E cmp, E val) { return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val); } void lazySetNext(Node<E> val) { UNSAFE.putOrderedObject(this, nextOffset, val); } boolean casNext(Node<E> cmp, Node<E> val) { return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val); } // Unsafe mechanics private static final sun.misc.Unsafe UNSAFE; private static final long itemOffset; private static final long nextOffset; static { try { UNSAFE = sun.misc.Unsafe.getUnsafe(); Class<?> k = Node.class; itemOffset = UNSAFE.objectFieldOffset (k.getDeclaredField("item")); nextOffset = UNSAFE.objectFieldOffset (k.getDeclaredField("next")); } catch (Exception e) { throw new Error(e); } } } /** * A node from which the first live (non-deleted) node (if any) * can be reached in O(1) time. * Invariants: * - all live nodes are reachable from head via succ() * - head != null * - (tmp = head).next != tmp || tmp != head * Non-invariants: * - head.item may or may not be null. * - it is permitted for tail to lag behind head, that is, for tail * to not be reachable from head! 
*/ private transient volatile Node<E> head; /** * A node from which the last node on list (that is, the unique * node with node.next == null) can be reached in O(1) time. * Invariants: * - the last node is always reachable from tail via succ() * - tail != null * Non-invariants: * - tail.item may or may not be null. * - it is permitted for tail to lag behind head, that is, for tail * to not be reachable from head! * - tail.next may or may not be self-pointing to tail. */ private transient volatile Node<E> tail; /** * Creates a {@code ConcurrentLinkedQueue} that is initially empty. */ public ConcurrentLinkedQueue() { head = tail = new Node<E>(null); } /** * Creates a {@code ConcurrentLinkedQueue} * initially containing the elements of the given collection, * added in traversal order of the collection's iterator. * * @param c the collection of elements to initially contain * @throws NullPointerException if the specified collection or any * of its elements are null */ public ConcurrentLinkedQueue(Collection<? extends E> c) { Node<E> h = null, t = null; for (E e : c) { checkNotNull(e); Node<E> newNode = new Node<E>(e); if (h == null) h = t = newNode; else { t.lazySetNext(newNode); t = newNode; } } if (h == null) h = t = new Node<E>(null); head = h; tail = t; } // Have to override just to update the javadoc /** * Inserts the specified element at the tail of this queue. * As the queue is unbounded, this method will never throw * {@link IllegalStateException} or return {@code false}. * * @return {@code true} (as specified by {@link Collection#add}) * @throws NullPointerException if the specified element is null */ public boolean add(E e) { return offer(e); } /** * Tries to CAS head to p. If successful, repoint old head to itself * as sentinel for succ(), below. 
*/ final void updateHead(Node<E> h, Node<E> p) { if (h != p && casHead(h, p)) h.lazySetNext(h); } /** * Returns the successor of p, or the head node if p.next has been * linked to self, which will only be true if traversing with a * stale pointer that is now off the list. */ final Node<E> succ(Node<E> p) { Node<E> next = p.next; return (p == next) ? head : next; } /** * Inserts the specified element at the tail of this queue. * As the queue is unbounded, this method will never return {@code false}. * * @return {@code true} (as specified by {@link Queue#offer}) * @throws NullPointerException if the specified element is null */ public boolean offer(E e) { checkNotNull(e); final Node<E> newNode = new Node<E>(e); for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { // p is last node if (p.casNext(null, newNode)) { // Successful CAS is the linearization point // for e to become an element of this queue, // and for newNode to become "live". if (p != t) // hop two nodes at a time casTail(t, newNode); // Failure is OK. return true; } // Lost CAS race to another thread; re-read next } else if (p == q) // We have fallen off list. If tail is unchanged, it // will also be off-list, in which case we need to // jump to head, from which all live nodes are always // reachable. Else the new tail is a better bet. p = (t != (t = tail)) ? t : head; else // Check for tail updates after two hops. p = (p != t && t != (t = tail)) ? t : q; } } public E poll() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;;) { E item = p.item; if (item != null && p.casItem(item, null)) { // Successful CAS is the linearization point // for item to be removed from this queue. if (p != h) // hop two nodes at a time updateHead(h, ((q = p.next) != null) ? 
q : p); return item; } else if ((q = p.next) == null) { updateHead(h, p); return null; } else if (p == q) continue restartFromHead; else p = q; } } } public E peek() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;;) { E item = p.item; if (item != null || (q = p.next) == null) { updateHead(h, p); return item; } else if (p == q) continue restartFromHead; else p = q; } } } /** * Returns the first live (non-deleted) node on list, or null if none. * This is yet another variant of poll/peek; here returning the * first node, not element. We could make peek() a wrapper around * first(), but that would cost an extra volatile read of item, * and the need to add a retry loop to deal with the possibility * of losing a race to a concurrent poll(). */ Node<E> first() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;;) { boolean hasItem = (p.item != null); if (hasItem || (q = p.next) == null) { updateHead(h, p); return hasItem ? p : null; } else if (p == q) continue restartFromHead; else p = q; } } } /** * Returns {@code true} if this queue contains no elements. * * @return {@code true} if this queue contains no elements */ public boolean isEmpty() { return first() == null; } /** * Returns the number of elements in this queue. If this queue * contains more than {@code Integer.MAX_VALUE} elements, returns * {@code Integer.MAX_VALUE}. * * <p>Beware that, unlike in most collections, this method is * <em>NOT</em> a constant-time operation. Because of the * asynchronous nature of these queues, determining the current * number of elements requires an O(n) traversal. * Additionally, if elements are added or removed during execution * of this method, the returned result may be inaccurate. Thus, * this method is typically not very useful in concurrent * applications. 
* * @return the number of elements in this queue */ public int size() { int count = 0; for (Node<E> p = first(); p != null; p = succ(p)) if (p.item != null) // Collection.size() spec says to max out if (++count == Integer.MAX_VALUE) break; return count; } /** * Returns {@code true} if this queue contains the specified element. * More formally, returns {@code true} if and only if this queue contains * at least one element {@code e} such that {@code o.equals(e)}. * * @param o object to be checked for containment in this queue * @return {@code true} if this queue contains the specified element */ public boolean contains(Object o) { if (o == null) return false; for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && o.equals(item)) return true; } return false; } /** * Removes a single instance of the specified element from this queue, * if it is present. More formally, removes an element {@code e} such * that {@code o.equals(e)}, if this queue contains one or more such * elements. * Returns {@code true} if this queue contained the specified element * (or equivalently, if this queue changed as a result of the call). * * @param o element to be removed from this queue, if present * @return {@code true} if this queue changed as a result of the call */ public boolean remove(Object o) { if (o == null) return false; Node<E> pred = null; for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && o.equals(item) && p.casItem(item, null)) { Node<E> next = succ(p); if (pred != null && next != null) pred.casNext(p, next); return true; } pred = p; } return false; } /** * Appends all of the elements in the specified collection to the end of * this queue, in the order that they are returned by the specified * collection's iterator. Attempts to {@code addAll} of a queue to * itself result in {@code IllegalArgumentException}. 
* * @param c the elements to be inserted into this queue * @return {@code true} if this queue changed as a result of the call * @throws NullPointerException if the specified collection or any * of its elements are null * @throws IllegalArgumentException if the collection is this queue */ public boolean addAll(Collection<? extends E> c) { if (c == this) // As historically specified in AbstractQueue#addAll throw new IllegalArgumentException(); // Copy c into a private chain of Nodes Node<E> beginningOfTheEnd = null, last = null; for (E e : c) { checkNotNull(e); Node<E> newNode = new Node<E>(e); if (beginningOfTheEnd == null) beginningOfTheEnd = last = newNode; else { last.lazySetNext(newNode); last = newNode; } } if (beginningOfTheEnd == null) return false; // Atomically append the chain at the tail of this collection for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { // p is last node if (p.casNext(null, beginningOfTheEnd)) { // Successful CAS is the linearization point // for all elements to be added to this queue. if (!casTail(t, last)) { // Try a little harder to update tail, // since we may be adding many elements. t = tail; if (last.next == null) casTail(t, last); } return true; } // Lost CAS race to another thread; re-read next } else if (p == q) // We have fallen off list. If tail is unchanged, it // will also be off-list, in which case we need to // jump to head, from which all live nodes are always // reachable. Else the new tail is a better bet. p = (t != (t = tail)) ? t : head; else // Check for tail updates after two hops. p = (p != t && t != (t = tail)) ? t : q; } } /** * Returns an array containing all of the elements in this queue, in * proper sequence. * * <p>The returned array will be "safe" in that no references to it are * maintained by this queue. (In other words, this method must allocate * a new array). The caller is thus free to modify the returned array. 
* * <p>This method acts as bridge between array-based and collection-based * APIs. * * @return an array containing all of the elements in this queue */ public Object[] toArray() { // Use ArrayList to deal with resizing. ArrayList<E> al = new ArrayList<E>(); for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null) al.add(item); } return al.toArray(); } /** * Returns an array containing all of the elements in this queue, in * proper sequence; the runtime type of the returned array is that of * the specified array. If the queue fits in the specified array, it * is returned therein. Otherwise, a new array is allocated with the * runtime type of the specified array and the size of this queue. * * <p>If this queue fits in the specified array with room to spare * (i.e., the array has more elements than this queue), the element in * the array immediately following the end of the queue is set to * {@code null}. * * <p>Like the {@link #toArray()} method, this method acts as bridge between * array-based and collection-based APIs. Further, this method allows * precise control over the runtime type of the output array, and may, * under certain circumstances, be used to save allocation costs. * * <p>Suppose {@code x} is a queue known to contain only strings. * The following code can be used to dump the queue into a newly * allocated array of {@code String}: * * <pre> {@code String[] y = x.toArray(new String[0]);}</pre> * * Note that {@code toArray(new Object[0])} is identical in function to * {@code toArray()}. 
* * @param a the array into which the elements of the queue are to * be stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose * @return an array containing all of the elements in this queue * @throws ArrayStoreException if the runtime type of the specified array * is not a supertype of the runtime type of every element in * this queue * @throws NullPointerException if the specified array is null */ @SuppressWarnings("unchecked") public <T> T[] toArray(T[] a) { // try to use sent-in array int k = 0; Node<E> p; for (p = first(); p != null && k < a.length; p = succ(p)) { E item = p.item; if (item != null) a[k++] = (T)item; } if (p == null) { if (k < a.length) a[k] = null; return a; } // If won't fit, use ArrayList version ArrayList<E> al = new ArrayList<E>(); for (Node<E> q = first(); q != null; q = succ(q)) { E item = q.item; if (item != null) al.add(item); } return al.toArray(a); } /** * Returns an iterator over the elements in this queue in proper sequence. * The elements will be returned in order from first (head) to last (tail). * * <p>The returned iterator is * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>. * * @return an iterator over the elements in this queue in proper sequence */ public Iterator<E> iterator() { return new Itr(); } private class Itr implements Iterator<E> { /** * Next node to return item for. */ private Node<E> nextNode; /** * nextItem holds on to item fields because once we claim * that an element exists in hasNext(), we must return it in * the following next() call even if it was in the process of * being removed when hasNext() was called. */ private E nextItem; /** * Node of the last returned item, to support remove. */ private Node<E> lastRet; Itr() { advance(); } /** * Moves to next valid node and returns item to return for * next(), or null if no such. 
 */
// Moves the iterator forward one live element, returning the element that
// was previously "next". Also re-caches lastRet/nextNode/nextItem and
// opportunistically unlinks dead (null-item) nodes encountered on the way.
private E advance() {
    lastRet = nextNode;
    E x = nextItem;

    Node<E> pred, p;
    if (nextNode == null) {
        // First call: start traversal at the queue head.
        p = first();
        pred = null;
    } else {
        pred = nextNode;
        p = succ(nextNode);
    }

    for (;;) {
        if (p == null) {
            // Ran off the end of the queue: iteration is exhausted.
            nextNode = null;
            nextItem = null;
            return x;
        }
        E item = p.item;
        if (item != null) {
            // Found the next live element; cache it for the next call.
            nextNode = p;
            nextItem = item;
            return x;
        } else {
            // skip over nulls
            // CAS-splice the dead node out; best-effort, failure is fine
            // because some other traversal will eventually relink.
            Node<E> next = succ(p);
            if (pred != null && next != null)
                pred.casNext(p, next);
            p = next;
        }
    }
}

// True while a cached next element remains.
public boolean hasNext() {
    return nextNode != null;
}

// Returns the cached next element and advances the cursor.
public E next() {
    if (nextNode == null) throw new NoSuchElementException();
    return advance();
}

// Removes the last returned element by nulling its item; the node itself
// is unlinked lazily by later traversals.
public void remove() {
    Node<E> l = lastRet;
    if (l == null) throw new IllegalStateException();
    // rely on a future traversal to relink.
    l.item = null;
    lastRet = null;
}
}

/**
 * Saves this queue to a stream (that is, serializes it).
 *
 * @param s the stream
 * @throws java.io.IOException if an I/O error occurs
 * @serialData All of the elements (each an {@code E}) in
 * the proper order, followed by a null
 */
private void writeObject(java.io.ObjectOutputStream s)
    throws java.io.IOException {

    // Write out any hidden stuff
    s.defaultWriteObject();

    // Write out all elements in the proper order.
    // Dead (null-item) nodes are skipped, so only live elements are stored.
    for (Node<E> p = first(); p != null; p = succ(p)) {
        Object item = p.item;
        if (item != null)
            s.writeObject(item);
    }

    // Use trailing null as sentinel
    s.writeObject(null);
}

/**
 * Reconstitutes this queue from a stream (that is, deserializes it).
 *
 * @param s the stream
 * @throws ClassNotFoundException if the class of a serialized object
 *         could not be found
 * @throws java.io.IOException if an I/O error occurs
 */
private void readObject(java.io.ObjectInputStream s)
    throws java.io.IOException, ClassNotFoundException {
    s.defaultReadObject();

    // Read in elements until trailing null sentinel found.
    // lazySetNext suffices here: the nodes are not yet published to
    // other threads during deserialization.
    Node<E> h = null, t = null;
    Object item;
    while ((item = s.readObject()) != null) {
        @SuppressWarnings("unchecked")
        Node<E> newNode = new Node<E>((E) item);
        if (h == null)
            h = t = newNode;
        else {
            t.lazySetNext(newNode);
            t = newNode;
        }
    }
    if (h == null)
        // Empty queue still needs one dummy node as head/tail.
        h = t = new Node<E>(null);
    head = h;
    tail = t;
}

/** A customized variant of Spliterators.IteratorSpliterator */
static final class CLQSpliterator<E> implements Spliterator<E> {
    static final int MAX_BATCH = 1 << 25;  // max batch array size;
    final ConcurrentLinkedQueue<E> queue;
    Node<E> current;    // current node; null until initialized
    int batch;          // batch size for splits
    boolean exhausted;  // true when no more nodes

    CLQSpliterator(ConcurrentLinkedQueue<E> queue) {
        this.queue = queue;
    }

    // Splits off a prefix of up to 'batch'+1 elements into an array-backed
    // spliterator; batch size grows arithmetically on each split.
    public Spliterator<E> trySplit() {
        Node<E> p;
        final ConcurrentLinkedQueue<E> q = this.queue;
        int b = batch;
        int n = (b <= 0) ? 1 : (b >= MAX_BATCH) ? MAX_BATCH : b + 1;
        if (!exhausted &&
            ((p = current) != null || (p = q.first()) != null) &&
            p.next != null) {
            Object[] a = new Object[n];
            int i = 0;
            do {
                if ((a[i] = p.item) != null)
                    ++i;
                // p == (p = p.next) detects a self-linked (off-list) node;
                // restart from the head in that case.
                if (p == (p = p.next))
                    p = q.first();
            } while (p != null && i < n);
            if ((current = p) == null)
                exhausted = true;
            if (i > 0) {
                batch = i;
                return Spliterators.spliterator
                    (a, 0, i, Spliterator.ORDERED | Spliterator.NONNULL |
                     Spliterator.CONCURRENT);
            }
        }
        return null;
    }

    // Consumes all remaining live elements, tolerating concurrent
    // modification (weakly consistent traversal).
    public void forEachRemaining(Consumer<? super E> action) {
        Node<E> p;
        if (action == null) throw new NullPointerException();
        final ConcurrentLinkedQueue<E> q = this.queue;
        if (!exhausted &&
            ((p = current) != null || (p = q.first()) != null)) {
            exhausted = true;
            do {
                E e = p.item;
                // Self-linked node: fell off the list; restart at head.
                if (p == (p = p.next))
                    p = q.first();
                if (e != null)
                    action.accept(e);
            } while (p != null);
        }
    }

    // Consumes at most one live element; returns false once exhausted.
    public boolean tryAdvance(Consumer<? super E> action) {
        Node<E> p;
        if (action == null) throw new NullPointerException();
        final ConcurrentLinkedQueue<E> q = this.queue;
        if (!exhausted &&
            ((p = current) != null || (p = q.first()) != null)) {
            E e;
            do {
                e = p.item;
                // Self-linked node: fell off the list; restart at head.
                if (p == (p = p.next))
                    p = q.first();
            } while (e == null && p != null);
            if ((current = p) == null)
                exhausted = true;
            if (e != null) {
                action.accept(e);
                return true;
            }
        }
        return false;
    }

    // Size is unknown for a concurrent queue.
    public long estimateSize() { return Long.MAX_VALUE; }

    public int characteristics() {
        return Spliterator.ORDERED | Spliterator.NONNULL |
            Spliterator.CONCURRENT;
    }
}

/**
 * Returns a {@link Spliterator} over the elements in this queue.
 *
 * <p>The returned spliterator is
 * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
 *
 * <p>The {@code Spliterator} reports {@link Spliterator#CONCURRENT},
 * {@link Spliterator#ORDERED}, and {@link Spliterator#NONNULL}.
 *
 * @implNote
 * The {@code Spliterator} implements {@code trySplit} to permit limited
 * parallelism.
 *
 * @return a {@code Spliterator} over the elements in this queue
 * @since 1.8
 */
@Override
public Spliterator<E> spliterator() {
    return new CLQSpliterator<E>(this);
}

/**
 * Throws NullPointerException if argument is null.
 *
 * @param v the element
 */
private static void checkNotNull(Object v) {
    if (v == null)
        throw new NullPointerException();
}

// CAS helpers for the head/tail fields, used by the enqueue/dequeue
// algorithm defined earlier in this class.
private boolean casTail(Node<E> cmp, Node<E> val) {
    return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val);
}

private boolean casHead(Node<E> cmp, Node<E> val) {
    return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val);
}

// Unsafe mechanics: resolve field offsets once at class load; any failure
// here is unrecoverable, hence Error.
private static final sun.misc.Unsafe UNSAFE;
private static final long headOffset;
private static final long tailOffset;
static {
    try {
        UNSAFE = sun.misc.Unsafe.getUnsafe();
        Class<?> k = ConcurrentLinkedQueue.class;
        headOffset = UNSAFE.objectFieldOffset
            (k.getDeclaredField("head"));
        tailOffset = UNSAFE.objectFieldOffset
            (k.getDeclaredField("tail"));
    } catch (Exception e) {
        throw new Error(e);
    }
}
}
apache/felix-dev
32,091
framework/src/main/java/org/osgi/framework/AdminPermission.java
/* * Copyright (c) OSGi Alliance (2000, 2017). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.osgi.framework; import java.io.IOException; import java.io.NotSerializableException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.ObjectStreamField; import java.security.AccessController; import java.security.BasicPermission; import java.security.Permission; import java.security.PermissionCollection; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Map; /** * A bundle's authority to perform specific privileged administrative operations * on or to get sensitive information about a bundle. 
The actions for this * permission are: * * <pre> * Action Methods * class Bundle.loadClass * execute Bundle.start * Bundle.stop * BundleStartLevel.setStartLevel * extensionLifecycle BundleContext.installBundle for extension bundles * Bundle.update for extension bundles * Bundle.uninstall for extension bundles * lifecycle BundleContext.installBundle * Bundle.update * Bundle.uninstall * listener BundleContext.addBundleListener for * SynchronousBundleListener * BundleContext.removeBundleListener for * SynchronousBundleListener * metadata Bundle.getHeaders * Bundle.getLocation * resolve FrameworkWiring.refreshBundles * FrameworkWiring.resolveBundles * resource Bundle.getResource * Bundle.getResources * Bundle.getEntry * Bundle.getEntryPaths * Bundle.findEntries * Bundle resource/entry URL creation * startlevel FrameworkStartLevel.setStartLevel * FrameworkStartLevel.setInitialBundleStartLevel * context Bundle.getBundleContext * weave WovenClass.getBytes * WovenClass.setBytes * WovenClass.getDynamicImports for modification * </pre> * * <p> * The special action &quot;*&quot; will represent all actions. The * {@code resolve} action is implied by the {@code class}, {@code execute} and * {@code resource} actions. * <p> * The name of this permission is a filter expression. The filter gives access * to the following attributes: * <ul> * <li>signer - A Distinguished Name chain used to sign a bundle. Wildcards in a * DN are not matched according to the filter string rules, but according to the * rules defined for a DN chain.</li> * <li>location - The location of a bundle.</li> * <li>id - The bundle ID of the designated bundle.</li> * <li>name - The symbolic name of a bundle.</li> * </ul> * Filter attribute names are processed in a case sensitive manner. 
 *
 * @ThreadSafe
 * @author $Id: 7906054ba14028f4c0dc21610dfd8b86ae3ffa00 $
 */
public final class AdminPermission extends BasicPermission {
	static final long serialVersionUID = 307051004521261705L;

	/**
	 * The action string {@code class}. The {@code class} action implies the
	 * {@code resolve} action.
	 *
	 * @since 1.3
	 */
	public final static String CLASS = "class";
	/**
	 * The action string {@code execute}. The {@code execute} action implies the
	 * {@code resolve} action.
	 *
	 * @since 1.3
	 */
	public final static String EXECUTE = "execute";
	/**
	 * The action string {@code extensionLifecycle}.
	 *
	 * @since 1.3
	 */
	public final static String EXTENSIONLIFECYCLE = "extensionLifecycle";
	/**
	 * The action string {@code lifecycle}.
	 *
	 * @since 1.3
	 */
	public final static String LIFECYCLE = "lifecycle";
	/**
	 * The action string {@code listener}.
	 *
	 * @since 1.3
	 */
	public final static String LISTENER = "listener";
	/**
	 * The action string {@code metadata}.
	 *
	 * @since 1.3
	 */
	public final static String METADATA = "metadata";
	/**
	 * The action string {@code resolve}. The {@code resolve} action is implied
	 * by the {@code class}, {@code execute} and {@code resource} actions.
	 *
	 * @since 1.3
	 */
	public final static String RESOLVE = "resolve";
	/**
	 * The action string {@code resource}. The {@code resource} action implies
	 * the {@code resolve} action.
	 *
	 * @since 1.3
	 */
	public final static String RESOURCE = "resource";
	/**
	 * The action string {@code startlevel}.
	 *
	 * @since 1.3
	 */
	public final static String STARTLEVEL = "startlevel";
	/**
	 * The action string {@code context}.
	 *
	 * @since 1.4
	 */
	public final static String CONTEXT = "context";
	/**
	 * The action string {@code weave}.
	 *
	 * @since 1.6
	 */
	public final static String WEAVE = "weave";

	// One bit per action; note the gap at 0x20 and that RESOLVE is OR-ed in
	// by parseActions for the class/execute/resource actions.
	private final static int ACTION_CLASS = 0x00000001;
	private final static int ACTION_EXECUTE = 0x00000002;
	private final static int ACTION_LIFECYCLE = 0x00000004;
	private final static int ACTION_LISTENER = 0x00000008;
	private final static int ACTION_METADATA = 0x00000010;
	private final static int ACTION_RESOLVE = 0x00000040;
	private final static int ACTION_RESOURCE = 0x00000080;
	private final static int ACTION_STARTLEVEL = 0x00000100;
	private final static int ACTION_EXTENSIONLIFECYCLE = 0x00000200;
	private final static int ACTION_CONTEXT = 0x00000400;
	private final static int ACTION_WEAVE = 0x00000800;
	private final static int ACTION_ALL = ACTION_CLASS | ACTION_EXECUTE
			| ACTION_LIFECYCLE | ACTION_LISTENER | ACTION_METADATA
			| ACTION_RESOLVE | ACTION_RESOURCE | ACTION_STARTLEVEL
			| ACTION_EXTENSIONLIFECYCLE | ACTION_CONTEXT | ACTION_WEAVE;
	final static int ACTION_NONE = 0;

	/**
	 * The actions in canonical form.
	 *
	 * @serial
	 */
	private volatile String actions = null;

	/**
	 * The actions mask.
	 */
	transient int action_mask;

	/**
	 * If this AdminPermission was constructed with a filter, this holds a
	 * Filter matching object used to evaluate the filter in implies.
	 */
	transient Filter filter;

	/**
	 * The bundle governed by this AdminPermission - only used if filter == null
	 */
	transient final Bundle bundle;

	/**
	 * This map holds the properties of the permission, used to match a filter
	 * in implies. This is not initialized until necessary, and then cached in
	 * this object.
	 */
	private transient volatile Map<String, Object> properties;

	/**
	 * ThreadLocal used to determine if we have recursively called
	 * getProperties.
	 */
	private static final ThreadLocal<Bundle> recurse = new ThreadLocal<Bundle>();

	/**
	 * Creates a new {@code AdminPermission} object that matches all bundles and
	 * has all actions. Equivalent to AdminPermission("*","*");
	 */
	public AdminPermission() {
		this(null, ACTION_ALL);
	}

	/**
	 * Create a new AdminPermission.
	 *
	 * This constructor must only be used to create a permission that is going
	 * to be checked.
	 * <p>
	 * Examples:
	 *
	 * <pre>
	 * (signer=\*,o=ACME,c=US)
	 * (&amp;(signer=\*,o=ACME,c=US)(name=com.acme.*)
	 * (location=http://www.acme.com/bundles/*))
	 * (id&gt;=1)
	 * </pre>
	 *
	 * <p>
	 * When a signer key is used within the filter expression the signer value
	 * must escape the special filter chars ('*', '(', ')').
	 * <p>
	 * Null arguments are equivalent to "*".
	 *
	 * @param filter A filter expression that can use signer, location, id, and
	 *        name keys. A value of &quot;*&quot; or {@code null} matches all
	 *        bundle. Filter attribute names are processed in a case sensitive
	 *        manner.
	 * @param actions {@code class}, {@code execute}, {@code extensionLifecycle}
	 *        , {@code lifecycle}, {@code listener}, {@code metadata},
	 *        {@code resolve} , {@code resource}, {@code startlevel},
	 *        {@code context} or {@code weave}. A value of "*" or {@code null}
	 *        indicates all actions.
	 * @throws IllegalArgumentException If the filter has an invalid syntax.
	 */
	public AdminPermission(String filter, String actions) {
		// arguments will be null if called from a PermissionInfo defined with
		// no args
		this(parseFilter(filter), parseActions(actions));
	}

	/**
	 * Creates a new requested {@code AdminPermission} object to be used by the
	 * code that must perform {@code checkPermission}. {@code AdminPermission}
	 * objects created with this constructor cannot be added to an
	 * {@code AdminPermission} permission collection.
	 *
	 * @param bundle A bundle.
	 * @param actions {@code class}, {@code execute}, {@code extensionLifecycle}
	 *        , {@code lifecycle}, {@code listener}, {@code metadata},
	 *        {@code resolve} , {@code resource}, {@code startlevel},
	 *        {@code context}, {@code weave}. A value of "*" or {@code null}
	 *        indicates all actions.
	 * @since 1.3
	 */
	public AdminPermission(Bundle bundle, String actions) {
		super(createName(bundle));
		setTransients(null, parseActions(actions));
		this.bundle = bundle;
	}

	/**
	 * Create a permission name from a Bundle
	 *
	 * @param bundle Bundle to use to create permission name.
	 * @return permission name.
	 */
	private static String createName(Bundle bundle) {
		if (bundle == null) {
			throw new IllegalArgumentException("bundle must not be null");
		}
		// Name is an "(id=<bundle id>)" filter-style string.
		StringBuilder sb = new StringBuilder("(id=");
		sb.append(bundle.getBundleId());
		sb.append(")");
		return sb.toString();
	}

	/**
	 * Package private constructor used by AdminPermissionCollection.
	 *
	 * @param filter name filter or {@code null} for wildcard.
	 * @param mask action mask
	 */
	AdminPermission(Filter filter, int mask) {
		super((filter == null) ? "*" : filter.toString());
		setTransients(filter, mask);
		this.bundle = null;
	}

	/**
	 * Called by constructors and when deserialized.
	 *
	 * @param filter Permission's filter or {@code null} for wildcard.
	 * @param mask action mask
	 */
	private void setTransients(Filter filter, int mask) {
		this.filter = filter;
		// Reject an empty mask or any bits outside ACTION_ALL.
		if ((mask == ACTION_NONE) || ((mask & ACTION_ALL) != mask)) {
			throw new IllegalArgumentException("invalid action string");
		}
		this.action_mask = mask;
	}

	/**
	 * Parse action string into action mask.
	 *
	 * The string is scanned backwards, keyword by keyword, using explicit
	 * case-insensitive character comparisons (no temporary strings).
	 *
	 * @param actions Action string.
	 * @return action mask.
	 */
	private static int parseActions(String actions) {
		if ((actions == null) || actions.equals("*")) {
			return ACTION_ALL;
		}

		boolean seencomma = false;

		int mask = ACTION_NONE;

		char[] a = actions.toCharArray();

		int i = a.length - 1;
		if (i < 0)
			return mask;

		while (i != -1) {
			char c;

			// skip whitespace
			while ((i != -1) && ((c = a[i]) == ' ' || c == '\r' || c == '\n' || c == '\f' || c == '\t'))
				i--;

			// check for the known strings
			int matchlen;

			if (i >= 4 && (a[i - 4] == 'c' || a[i - 4] == 'C')
					&& (a[i - 3] == 'l' || a[i - 3] == 'L')
					&& (a[i - 2] == 'a' || a[i - 2] == 'A')
					&& (a[i - 1] == 's' || a[i - 1] == 'S')
					&& (a[i] == 's' || a[i] == 'S')) {
				matchlen = 5;
				mask |= ACTION_CLASS | ACTION_RESOLVE;

			} else if (i >= 6 && (a[i - 6] == 'e' || a[i - 6] == 'E')
					&& (a[i - 5] == 'x' || a[i - 5] == 'X')
					&& (a[i - 4] == 'e' || a[i - 4] == 'E')
					&& (a[i - 3] == 'c' || a[i - 3] == 'C')
					&& (a[i - 2] == 'u' || a[i - 2] == 'U')
					&& (a[i - 1] == 't' || a[i - 1] == 'T')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 7;
				mask |= ACTION_EXECUTE | ACTION_RESOLVE;

			} else if (i >= 17 && (a[i - 17] == 'e' || a[i - 17] == 'E')
					&& (a[i - 16] == 'x' || a[i - 16] == 'X')
					&& (a[i - 15] == 't' || a[i - 15] == 'T')
					&& (a[i - 14] == 'e' || a[i - 14] == 'E')
					&& (a[i - 13] == 'n' || a[i - 13] == 'N')
					&& (a[i - 12] == 's' || a[i - 12] == 'S')
					&& (a[i - 11] == 'i' || a[i - 11] == 'I')
					&& (a[i - 10] == 'o' || a[i - 10] == 'O')
					&& (a[i - 9] == 'n' || a[i - 9] == 'N')
					&& (a[i - 8] == 'l' || a[i - 8] == 'L')
					&& (a[i - 7] == 'i' || a[i - 7] == 'I')
					&& (a[i - 6] == 'f' || a[i - 6] == 'F')
					&& (a[i - 5] == 'e' || a[i - 5] == 'E')
					&& (a[i - 4] == 'c' || a[i - 4] == 'C')
					&& (a[i - 3] == 'y' || a[i - 3] == 'Y')
					&& (a[i - 2] == 'c' || a[i - 2] == 'C')
					&& (a[i - 1] == 'l' || a[i - 1] == 'L')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 18;
				mask |= ACTION_EXTENSIONLIFECYCLE;

			} else if (i >= 8 && (a[i - 8] == 'l' || a[i - 8] == 'L')
					&& (a[i - 7] == 'i' || a[i - 7] == 'I')
					&& (a[i - 6] == 'f' || a[i - 6] == 'F')
					&& (a[i - 5] == 'e' || a[i - 5] == 'E')
					&& (a[i - 4] == 'c' || a[i - 4] == 'C')
					&& (a[i - 3] == 'y' || a[i - 3] == 'Y')
					&& (a[i - 2] == 'c' || a[i - 2] == 'C')
					&& (a[i - 1] == 'l' || a[i - 1] == 'L')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 9;
				mask |= ACTION_LIFECYCLE;

			} else if (i >= 7 && (a[i - 7] == 'l' || a[i - 7] == 'L')
					&& (a[i - 6] == 'i' || a[i - 6] == 'I')
					&& (a[i - 5] == 's' || a[i - 5] == 'S')
					&& (a[i - 4] == 't' || a[i - 4] == 'T')
					&& (a[i - 3] == 'e' || a[i - 3] == 'E')
					&& (a[i - 2] == 'n' || a[i - 2] == 'N')
					&& (a[i - 1] == 'e' || a[i - 1] == 'E')
					&& (a[i] == 'r' || a[i] == 'R')) {
				matchlen = 8;
				mask |= ACTION_LISTENER;

			} else if (i >= 7 && (a[i - 7] == 'm' || a[i - 7] == 'M')
					&& (a[i - 6] == 'e' || a[i - 6] == 'E')
					&& (a[i - 5] == 't' || a[i - 5] == 'T')
					&& (a[i - 4] == 'a' || a[i - 4] == 'A')
					&& (a[i - 3] == 'd' || a[i - 3] == 'D')
					&& (a[i - 2] == 'a' || a[i - 2] == 'A')
					&& (a[i - 1] == 't' || a[i - 1] == 'T')
					&& (a[i] == 'a' || a[i] == 'A')) {
				matchlen = 8;
				mask |= ACTION_METADATA;

			} else if (i >= 6 && (a[i - 6] == 'r' || a[i - 6] == 'R')
					&& (a[i - 5] == 'e' || a[i - 5] == 'E')
					&& (a[i - 4] == 's' || a[i - 4] == 'S')
					&& (a[i - 3] == 'o' || a[i - 3] == 'O')
					&& (a[i - 2] == 'l' || a[i - 2] == 'L')
					&& (a[i - 1] == 'v' || a[i - 1] == 'V')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 7;
				mask |= ACTION_RESOLVE;

			} else if (i >= 7 && (a[i - 7] == 'r' || a[i - 7] == 'R')
					&& (a[i - 6] == 'e' || a[i - 6] == 'E')
					&& (a[i - 5] == 's' || a[i - 5] == 'S')
					&& (a[i - 4] == 'o' || a[i - 4] == 'O')
					&& (a[i - 3] == 'u' || a[i - 3] == 'U')
					&& (a[i - 2] == 'r' || a[i - 2] == 'R')
					&& (a[i - 1] == 'c' || a[i - 1] == 'C')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 8;
				mask |= ACTION_RESOURCE | ACTION_RESOLVE;

			} else if (i >= 9 && (a[i - 9] == 's' || a[i - 9] == 'S')
					&& (a[i - 8] == 't' || a[i - 8] == 'T')
					&& (a[i - 7] == 'a' || a[i - 7] == 'A')
					&& (a[i - 6] == 'r' || a[i - 6] == 'R')
					&& (a[i - 5] == 't' || a[i - 5] == 'T')
					&& (a[i - 4] == 'l' || a[i - 4] == 'L')
					&& (a[i - 3] == 'e' || a[i - 3] == 'E')
					&& (a[i - 2] == 'v' || a[i - 2] == 'V')
					&& (a[i - 1] == 'e' || a[i - 1] == 'E')
					&& (a[i] == 'l' || a[i] == 'L')) {
				matchlen = 10;
				mask |= ACTION_STARTLEVEL;

			} else if (i >= 6 && (a[i - 6] == 'c' || a[i - 6] == 'C')
					&& (a[i - 5] == 'o' || a[i - 5] == 'O')
					&& (a[i - 4] == 'n' || a[i - 4] == 'N')
					&& (a[i - 3] == 't' || a[i - 3] == 'T')
					&& (a[i - 2] == 'e' || a[i - 2] == 'E')
					&& (a[i - 1] == 'x' || a[i - 1] == 'X')
					&& (a[i] == 't' || a[i] == 'T')) {
				matchlen = 7;
				mask |= ACTION_CONTEXT;

			} else if (i >= 4 && (a[i - 4] == 'w' || a[i - 4] == 'W')
					&& (a[i - 3] == 'e' || a[i - 3] == 'E')
					&& (a[i - 2] == 'a' || a[i - 2] == 'A')
					&& (a[i - 1] == 'v' || a[i - 1] == 'V')
					&& (a[i] == 'e' || a[i] == 'E')) {
				matchlen = 5;
				mask |= ACTION_WEAVE;

			} else if (i >= 0 && (a[i] == '*')) {
				matchlen = 1;
				mask |= ACTION_ALL;

			} else {
				// parse error
				throw new IllegalArgumentException("invalid permission: " + actions);
			}

			// make sure we didn't just match the tail of a word
			// like "ackbarfstartlevel". Also, skip to the comma.
			seencomma = false;
			while (i >= matchlen && !seencomma) {
				switch (a[i - matchlen]) {
					case ',' :
						seencomma = true;
						/* FALLTHROUGH */
					case ' ' :
					case '\r' :
					case '\n' :
					case '\f' :
					case '\t' :
						break;
					default :
						throw new IllegalArgumentException("invalid permission: " + actions);
				}
				i--;
			}

			// point i at the location of the comma minus one (or -1).
			i -= matchlen;
		}

		// A trailing (leading, in scan order) comma is malformed.
		if (seencomma) {
			throw new IllegalArgumentException("invalid permission: " + actions);
		}

		return mask;
	}

	/**
	 * Parse filter string into a Filter object.
	 *
	 * @param filterString The filter string to parse.
	 * @return a Filter for this bundle. If the specified filterString is
	 *         {@code null} or equals "*", then {@code null} is returned to
	 *         indicate a wildcard.
	 * @throws IllegalArgumentException If the filter syntax is invalid.
	 */
	private static Filter parseFilter(String filterString) {
		if (filterString == null) {
			return null;
		}
		filterString = filterString.trim();
		if (filterString.equals("*")) {
			return null;
		}

		try {
			return FrameworkUtil.createFilter(filterString);
		} catch (InvalidSyntaxException e) {
			throw new IllegalArgumentException("invalid filter", e);
		}
	}

	/**
	 * Determines if the specified permission is implied by this object. This
	 * method throws an exception if the specified permission was not
	 * constructed with a bundle.
	 *
	 * <p>
	 * This method returns {@code true} if the specified permission is an
	 * AdminPermission AND
	 * <ul>
	 * <li>this object's filter matches the specified permission's bundle ID,
	 * bundle symbolic name, bundle location and bundle signer distinguished
	 * name chain OR</li>
	 * <li>this object's filter is "*"</li>
	 * </ul>
	 * AND this object's actions include all of the specified permission's
	 * actions.
	 * <p>
	 * Special case: if the specified permission was constructed with "*"
	 * filter, then this method returns {@code true} if this object's filter is
	 * "*" and this object's actions include all of the specified permission's
	 * actions
	 *
	 * @param p The requested permission.
	 * @return {@code true} if the specified permission is implied by this
	 *         object; {@code false} otherwise.
	 */
	@Override
	public boolean implies(Permission p) {
		if (!(p instanceof AdminPermission)) {
			return false;
		}
		AdminPermission requested = (AdminPermission) p;
		// A bundle-constructed permission can never imply anything.
		if (bundle != null) {
			return false;
		}
		// if requested permission has a filter, then it is an invalid argument
		if (requested.filter != null) {
			return false;
		}
		return implies0(requested, ACTION_NONE);
	}

	/**
	 * Internal implies method. Used by the implies and the permission
	 * collection implies methods.
	 *
	 * @param requested The requested AdminPermision which has already be
	 *        validated as a proper argument. The requested AdminPermission must
	 *        not have a filter expression.
	 * @param effective The effective actions with which to start.
	 * @return {@code true} if the specified permission is implied by this
	 *         object; {@code false} otherwise.
	 */
	boolean implies0(AdminPermission requested, int effective) {
		/* check actions first - much faster */
		effective |= action_mask;
		final int desired = requested.action_mask;
		if ((effective & desired) != desired) {
			return false;
		}

		/* Get our filter */
		Filter f = filter;
		if (f == null) {
			// it's "*"
			return true;
		}

		/* is requested a wildcard filter? */
		if (requested.bundle == null) {
			return false;
		}
		Map<String, Object> requestedProperties = requested.getProperties();
		if (requestedProperties == null) {
			/*
			 * If the requested properties are null, then we have detected a
			 * recursion getting the bundle location. So we return true to
			 * permit the bundle location request in the AdminPermission check
			 * up the stack to succeed.
			 */
			return true;
		}
		return f.matches(requestedProperties);
	}

	/**
	 * Returns the canonical string representation of the
	 * {@code AdminPermission} actions.
	 *
	 * <p>
	 * Always returns present {@code AdminPermission} actions in the following
	 * order: {@code class}, {@code execute}, {@code extensionLifecycle},
	 * {@code lifecycle}, {@code listener}, {@code metadata}, {@code resolve},
	 * {@code resource}, {@code startlevel}, {@code context}, {@code weave}.
	 *
	 * @return Canonical string representation of the {@code AdminPermission}
	 *         actions.
	 */
	@Override
	public String getActions() {
		// Lazily build and cache the canonical comma-separated form.
		String result = actions;
		if (result == null) {
			StringBuilder sb = new StringBuilder();

			int mask = action_mask;

			if ((mask & ACTION_CLASS) == ACTION_CLASS) {
				sb.append(CLASS);
				sb.append(',');
			}

			if ((mask & ACTION_EXECUTE) == ACTION_EXECUTE) {
				sb.append(EXECUTE);
				sb.append(',');
			}

			if ((mask & ACTION_EXTENSIONLIFECYCLE) == ACTION_EXTENSIONLIFECYCLE) {
				sb.append(EXTENSIONLIFECYCLE);
				sb.append(',');
			}

			if ((mask & ACTION_LIFECYCLE) == ACTION_LIFECYCLE) {
				sb.append(LIFECYCLE);
				sb.append(',');
			}

			if ((mask & ACTION_LISTENER) == ACTION_LISTENER) {
				sb.append(LISTENER);
				sb.append(',');
			}

			if ((mask & ACTION_METADATA) == ACTION_METADATA) {
				sb.append(METADATA);
				sb.append(',');
			}

			if ((mask & ACTION_RESOLVE) == ACTION_RESOLVE) {
				sb.append(RESOLVE);
				sb.append(',');
			}

			if ((mask & ACTION_RESOURCE) == ACTION_RESOURCE) {
				sb.append(RESOURCE);
				sb.append(',');
			}

			if ((mask & ACTION_STARTLEVEL) == ACTION_STARTLEVEL) {
				sb.append(STARTLEVEL);
				sb.append(',');
			}

			if ((mask & ACTION_CONTEXT) == ACTION_CONTEXT) {
				sb.append(CONTEXT);
				sb.append(',');
			}

			if ((mask & ACTION_WEAVE) == ACTION_WEAVE) {
				sb.append(WEAVE);
				sb.append(',');
			}

			// remove trailing comma
			if (sb.length() > 0) {
				sb.setLength(sb.length() - 1);
			}

			actions = result = sb.toString();
		}
		return result;
	}

	/**
	 * Returns a new {@code PermissionCollection} object suitable for storing
	 * {@code AdminPermission}s.
	 *
	 * @return A new {@code PermissionCollection} object.
	 */
	@Override
	public PermissionCollection newPermissionCollection() {
		return new AdminPermissionCollection();
	}

	/**
	 * Determines the equality of two {@code AdminPermission} objects.
	 *
	 * @param obj The object being compared for equality with this object.
	 * @return {@code true} if {@code obj} is equivalent to this
	 *         {@code AdminPermission}; {@code false} otherwise.
	 */
	@Override
	public boolean equals(Object obj) {
		if (obj == this) {
			return true;
		}

		if (!(obj instanceof AdminPermission)) {
			return false;
		}

		AdminPermission ap = (AdminPermission) obj;

		// Equal iff mask, bundle (null-safe) and filter (null-safe) match.
		return (action_mask == ap.action_mask)
				&& ((bundle == ap.bundle) || ((bundle != null) && bundle.equals(ap.bundle)))
				&& (filter == null ? ap.filter == null : filter.equals(ap.filter));
	}

	/**
	 * Returns the hash code value for this object.
	 *
	 * @return Hash code value for this object.
	 */
	@Override
	public int hashCode() {
		int h = 31 * 17 + getName().hashCode();
		h = 31 * h + getActions().hashCode();
		if (bundle != null) {
			h = 31 * h + bundle.hashCode();
		}
		return h;
	}

	/**
	 * WriteObject is called to save the state of this permission object to a
	 * stream. The actions are serialized, and the superclass takes care of the
	 * name.
	 */
	private synchronized void writeObject(java.io.ObjectOutputStream s) throws IOException {
		// Bundle-constructed permissions are explicitly not serializable.
		if (bundle != null) {
			throw new NotSerializableException("cannot serialize");
		}
		// Write out the actions. The superclass takes care of the name
		// call getActions to make sure actions field is initialized
		if (actions == null)
			getActions();
		s.defaultWriteObject();
	}

	/**
	 * readObject is called to restore the state of this permission from a
	 * stream.
	 */
	private synchronized void readObject(java.io.ObjectInputStream s) throws IOException, ClassNotFoundException {
		// Read in the data, then initialize the transients
		s.defaultReadObject();
		setTransients(parseFilter(getName()), parseActions(actions));
	}

	/**
	 * Called by {@code implies0} on an AdminPermission which was constructed
	 * with a Bundle. This method loads a map with the filter-matchable
	 * properties of this bundle. The map is cached so this lookup only happens
	 * once.
	 *
	 * This method should only be called on an AdminPermission which was
	 * constructed with a bundle
	 *
	 * @return a map of properties for this bundle
	 */
	private Map<String, Object> getProperties() {
		Map<String, Object> result = properties;
		if (result != null) {
			return result;
		}
		/*
		 * We may have recursed here due to the Bundle.getLocation call in the
		 * doPrivileged below. If this is the case, return null to allow implies
		 * to return true.
		 */
		final Object mark = recurse.get();
		if (mark == bundle) {
			return null;
		}
		recurse.set(bundle);
		try {
			final Map<String, Object> map = new HashMap<String, Object>(4);
			AccessController.doPrivileged(new PrivilegedAction<Void>() {
				@Override
				public Void run() {
					map.put("id", Long.valueOf(bundle.getBundleId()));
					map.put("location", bundle.getLocation());
					String name = bundle.getSymbolicName();
					if (name != null) {
						map.put("name", name);
					}
					SignerProperty signer = new SignerProperty(bundle);
					if (signer.isBundleSigned()) {
						map.put("signer", signer);
					}
					return null;
				}
			});
			return properties = map;
		} finally {
			recurse.set(null);
		}
	}
}

/**
 * Stores a collection of {@code AdminPermission}s.
 */
final class AdminPermissionCollection extends PermissionCollection {
	private static final long serialVersionUID = 3906372644575328048L;
	/**
	 * Collection of permissions.
	 *
	 * @GuardedBy this
	 */
	private transient Map<String, AdminPermission> permissions;

	/**
	 * Boolean saying if "*" is in the collection.
	 *
	 * @serial
	 * @GuardedBy this
	 */
	private boolean all_allowed;

	/**
	 * Create an empty AdminPermissions object.
	 */
	public AdminPermissionCollection() {
		permissions = new HashMap<String, AdminPermission>();
	}

	/**
	 * Adds a permission to this permission collection.
	 *
	 * @param permission The {@code AdminPermission} object to add.
	 * @throws IllegalArgumentException If the specified permission is not an
	 *         {@code AdminPermission} instance or was constructed with a Bundle
	 *         object.
	 * @throws SecurityException If this {@code AdminPermissionCollection}
	 *         object has been marked read-only.
	 */
	@Override
	public void add(Permission permission) {
		if (!(permission instanceof AdminPermission)) {
			throw new IllegalArgumentException("invalid permission: " + permission);
		}
		if (isReadOnly()) {
			throw new SecurityException("attempt to add a Permission to a " + "readonly PermissionCollection");
		}

		final AdminPermission ap = (AdminPermission) permission;
		if (ap.bundle != null) {
			throw new IllegalArgumentException("cannot add to collection: " + ap);
		}

		final String name = ap.getName();
		synchronized (this) {
			Map<String, AdminPermission> pc = permissions;
			AdminPermission existing = pc.get(name);
			if (existing != null) {
				// Same filter name already present: merge the action masks.
				int oldMask = existing.action_mask;
				int newMask = ap.action_mask;

				if (oldMask != newMask) {
					pc.put(name, new AdminPermission(existing.filter, oldMask | newMask));
				}
			} else {
				pc.put(name, ap);
			}

			if (!all_allowed) {
				if (name.equals("*")) {
					all_allowed = true;
				}
			}
		}
	}

	/**
	 * Determines if the specified permissions implies the permissions expressed
	 * in {@code permission}.
	 *
	 * @param permission The Permission object to compare with the
	 *        {@code AdminPermission} objects in this collection.
	 * @return {@code true} if {@code permission} is implied by an
	 *         {@code AdminPermission} in this collection, {@code false}
	 *         otherwise.
	 */
	@Override
	public boolean implies(Permission permission) {
		if (!(permission instanceof AdminPermission)) {
			return false;
		}

		AdminPermission requested = (AdminPermission) permission;
		// if requested permission has a filter, then it is an invalid argument
		if (requested.filter != null) {
			return false;
		}

		int effective = AdminPermission.ACTION_NONE;
		Collection<AdminPermission> perms;
		synchronized (this) {
			Map<String, AdminPermission> pc = permissions;
			// short circuit if the "*" Permission was added
			if (all_allowed) {
				AdminPermission ap = pc.get("*");
				if (ap != null) {
					effective |= ap.action_mask;
					final int desired = requested.action_mask;
					if ((effective & desired) == desired) {
						return true;
					}
				}
			}
			perms = pc.values();
		}

		// just iterate one by one
		for (AdminPermission perm : perms) {
			if (perm.implies0(requested, effective)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns an enumeration of all {@code AdminPermission} objects in the
	 * container.
	 *
	 * @return Enumeration of all {@code AdminPermission} objects.
	 */
	@Override
	public synchronized Enumeration<Permission> elements() {
		// Snapshot into a list so the enumeration is stable after unlock.
		List<Permission> all = new ArrayList<Permission>(permissions.values());
		return Collections.enumeration(all);
	}

	/* serialization logic */
	// Serialized form uses a Hashtable for compatibility with earlier
	// releases, while the in-memory form is a HashMap.
	private static final ObjectStreamField[] serialPersistentFields = {
			new ObjectStreamField("permissions", Hashtable.class),
			new ObjectStreamField("all_allowed", Boolean.TYPE)};

	private synchronized void writeObject(ObjectOutputStream out) throws IOException {
		Hashtable<String, AdminPermission> hashtable = new Hashtable<String, AdminPermission>(permissions);
		ObjectOutputStream.PutField pfields = out.putFields();
		pfields.put("permissions", hashtable);
		pfields.put("all_allowed", all_allowed);
		out.writeFields();
	}

	private synchronized void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
		ObjectInputStream.GetField gfields = in.readFields();
		@SuppressWarnings("unchecked")
		Hashtable<String, AdminPermission> hashtable = (Hashtable<String, AdminPermission>) gfields.get("permissions", null);
		permissions = new HashMap<String, AdminPermission>(hashtable);
		all_allowed = gfields.get("all_allowed", false);
	}
}
googleapis/google-cloud-java
34,923
java-maps-routing/proto-google-maps-routing-v2/src/main/java/com/google/maps/routing/v2/SpeedReadingInterval.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/maps/routing/v2/speed_reading_interval.proto // Protobuf Java Version: 3.25.8 package com.google.maps.routing.v2; /** * * * <pre> * Traffic density indicator on a contiguous segment of a polyline or path. * Given a path with points P_0, P_1, ... , P_N (zero-based index), the * `SpeedReadingInterval` defines an interval and describes its traffic using * the following categories. * </pre> * * Protobuf type {@code google.maps.routing.v2.SpeedReadingInterval} */ public final class SpeedReadingInterval extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.maps.routing.v2.SpeedReadingInterval) SpeedReadingIntervalOrBuilder { private static final long serialVersionUID = 0L; // Use SpeedReadingInterval.newBuilder() to construct. 
private SpeedReadingInterval(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SpeedReadingInterval() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SpeedReadingInterval(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.routing.v2.SpeedReadingIntervalProto .internal_static_google_maps_routing_v2_SpeedReadingInterval_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.routing.v2.SpeedReadingIntervalProto .internal_static_google_maps_routing_v2_SpeedReadingInterval_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.routing.v2.SpeedReadingInterval.class, com.google.maps.routing.v2.SpeedReadingInterval.Builder.class); } /** * * * <pre> * The classification of polyline speed based on traffic data. * </pre> * * Protobuf enum {@code google.maps.routing.v2.SpeedReadingInterval.Speed} */ public enum Speed implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Default value. This value is unused. * </pre> * * <code>SPEED_UNSPECIFIED = 0;</code> */ SPEED_UNSPECIFIED(0), /** * * * <pre> * Normal speed, no slowdown is detected. * </pre> * * <code>NORMAL = 1;</code> */ NORMAL(1), /** * * * <pre> * Slowdown detected, but no traffic jam formed. * </pre> * * <code>SLOW = 2;</code> */ SLOW(2), /** * * * <pre> * Traffic jam detected. * </pre> * * <code>TRAFFIC_JAM = 3;</code> */ TRAFFIC_JAM(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Default value. This value is unused. * </pre> * * <code>SPEED_UNSPECIFIED = 0;</code> */ public static final int SPEED_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Normal speed, no slowdown is detected. 
* </pre> * * <code>NORMAL = 1;</code> */ public static final int NORMAL_VALUE = 1; /** * * * <pre> * Slowdown detected, but no traffic jam formed. * </pre> * * <code>SLOW = 2;</code> */ public static final int SLOW_VALUE = 2; /** * * * <pre> * Traffic jam detected. * </pre> * * <code>TRAFFIC_JAM = 3;</code> */ public static final int TRAFFIC_JAM_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Speed valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Speed forNumber(int value) { switch (value) { case 0: return SPEED_UNSPECIFIED; case 1: return NORMAL; case 2: return SLOW; case 3: return TRAFFIC_JAM; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Speed> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Speed> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Speed>() { public Speed findValueByNumber(int number) { return Speed.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return 
com.google.maps.routing.v2.SpeedReadingInterval.getDescriptor().getEnumTypes().get(0); } private static final Speed[] VALUES = values(); public static Speed valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Speed(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.maps.routing.v2.SpeedReadingInterval.Speed) } private int bitField0_; private int speedTypeCase_ = 0; @SuppressWarnings("serial") private java.lang.Object speedType_; public enum SpeedTypeCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { SPEED(3), SPEEDTYPE_NOT_SET(0); private final int value; private SpeedTypeCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static SpeedTypeCase valueOf(int value) { return forNumber(value); } public static SpeedTypeCase forNumber(int value) { switch (value) { case 3: return SPEED; case 0: return SPEEDTYPE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SpeedTypeCase getSpeedTypeCase() { return SpeedTypeCase.forNumber(speedTypeCase_); } public static final int START_POLYLINE_POINT_INDEX_FIELD_NUMBER = 1; private int startPolylinePointIndex_ = 0; /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @return Whether the startPolylinePointIndex field is set. 
*/ @java.lang.Override public boolean hasStartPolylinePointIndex() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @return The startPolylinePointIndex. */ @java.lang.Override public int getStartPolylinePointIndex() { return startPolylinePointIndex_; } public static final int END_POLYLINE_POINT_INDEX_FIELD_NUMBER = 2; private int endPolylinePointIndex_ = 0; /** * * * <pre> * The ending index of this interval in the polyline. * </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @return Whether the endPolylinePointIndex field is set. */ @java.lang.Override public boolean hasEndPolylinePointIndex() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The ending index of this interval in the polyline. * </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @return The endPolylinePointIndex. */ @java.lang.Override public int getEndPolylinePointIndex() { return endPolylinePointIndex_; } public static final int SPEED_FIELD_NUMBER = 3; /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return Whether the speed field is set. */ public boolean hasSpeed() { return speedTypeCase_ == 3; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return The enum numeric value on the wire for speed. */ public int getSpeedValue() { if (speedTypeCase_ == 3) { return (java.lang.Integer) speedType_; } return 0; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return The speed. 
*/ public com.google.maps.routing.v2.SpeedReadingInterval.Speed getSpeed() { if (speedTypeCase_ == 3) { com.google.maps.routing.v2.SpeedReadingInterval.Speed result = com.google.maps.routing.v2.SpeedReadingInterval.Speed.forNumber( (java.lang.Integer) speedType_); return result == null ? com.google.maps.routing.v2.SpeedReadingInterval.Speed.UNRECOGNIZED : result; } return com.google.maps.routing.v2.SpeedReadingInterval.Speed.SPEED_UNSPECIFIED; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(1, startPolylinePointIndex_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeInt32(2, endPolylinePointIndex_); } if (speedTypeCase_ == 3) { output.writeEnum(3, ((java.lang.Integer) speedType_)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, startPolylinePointIndex_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, endPolylinePointIndex_); } if (speedTypeCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeEnumSize( 3, ((java.lang.Integer) speedType_)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.maps.routing.v2.SpeedReadingInterval)) { return super.equals(obj); } com.google.maps.routing.v2.SpeedReadingInterval other = 
(com.google.maps.routing.v2.SpeedReadingInterval) obj; if (hasStartPolylinePointIndex() != other.hasStartPolylinePointIndex()) return false; if (hasStartPolylinePointIndex()) { if (getStartPolylinePointIndex() != other.getStartPolylinePointIndex()) return false; } if (hasEndPolylinePointIndex() != other.hasEndPolylinePointIndex()) return false; if (hasEndPolylinePointIndex()) { if (getEndPolylinePointIndex() != other.getEndPolylinePointIndex()) return false; } if (!getSpeedTypeCase().equals(other.getSpeedTypeCase())) return false; switch (speedTypeCase_) { case 3: if (getSpeedValue() != other.getSpeedValue()) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStartPolylinePointIndex()) { hash = (37 * hash) + START_POLYLINE_POINT_INDEX_FIELD_NUMBER; hash = (53 * hash) + getStartPolylinePointIndex(); } if (hasEndPolylinePointIndex()) { hash = (37 * hash) + END_POLYLINE_POINT_INDEX_FIELD_NUMBER; hash = (53 * hash) + getEndPolylinePointIndex(); } switch (speedTypeCase_) { case 3: hash = (37 * hash) + SPEED_FIELD_NUMBER; hash = (53 * hash) + getSpeedValue(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.routing.v2.SpeedReadingInterval parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.maps.routing.v2.SpeedReadingInterval parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.routing.v2.SpeedReadingInterval parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.maps.routing.v2.SpeedReadingInterval prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Traffic density indicator on a contiguous segment of a polyline or path. * Given a path with points P_0, P_1, ... , P_N (zero-based index), the * `SpeedReadingInterval` defines an interval and describes its traffic using * the following categories. 
* </pre> * * Protobuf type {@code google.maps.routing.v2.SpeedReadingInterval} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.maps.routing.v2.SpeedReadingInterval) com.google.maps.routing.v2.SpeedReadingIntervalOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.routing.v2.SpeedReadingIntervalProto .internal_static_google_maps_routing_v2_SpeedReadingInterval_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.routing.v2.SpeedReadingIntervalProto .internal_static_google_maps_routing_v2_SpeedReadingInterval_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.routing.v2.SpeedReadingInterval.class, com.google.maps.routing.v2.SpeedReadingInterval.Builder.class); } // Construct using com.google.maps.routing.v2.SpeedReadingInterval.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; startPolylinePointIndex_ = 0; endPolylinePointIndex_ = 0; speedTypeCase_ = 0; speedType_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.maps.routing.v2.SpeedReadingIntervalProto .internal_static_google_maps_routing_v2_SpeedReadingInterval_descriptor; } @java.lang.Override public com.google.maps.routing.v2.SpeedReadingInterval getDefaultInstanceForType() { return com.google.maps.routing.v2.SpeedReadingInterval.getDefaultInstance(); } @java.lang.Override public com.google.maps.routing.v2.SpeedReadingInterval build() { com.google.maps.routing.v2.SpeedReadingInterval result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.maps.routing.v2.SpeedReadingInterval buildPartial() { com.google.maps.routing.v2.SpeedReadingInterval result = new com.google.maps.routing.v2.SpeedReadingInterval(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.maps.routing.v2.SpeedReadingInterval result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.startPolylinePointIndex_ = startPolylinePointIndex_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.endPolylinePointIndex_ = endPolylinePointIndex_; to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.maps.routing.v2.SpeedReadingInterval result) { result.speedTypeCase_ = speedTypeCase_; result.speedType_ = this.speedType_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.maps.routing.v2.SpeedReadingInterval) { return mergeFrom((com.google.maps.routing.v2.SpeedReadingInterval) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.maps.routing.v2.SpeedReadingInterval other) { if (other == com.google.maps.routing.v2.SpeedReadingInterval.getDefaultInstance()) return this; if (other.hasStartPolylinePointIndex()) { setStartPolylinePointIndex(other.getStartPolylinePointIndex()); } if (other.hasEndPolylinePointIndex()) { setEndPolylinePointIndex(other.getEndPolylinePointIndex()); } switch (other.getSpeedTypeCase()) { case SPEED: { setSpeedValue(other.getSpeedValue()); break; } case SPEEDTYPE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { startPolylinePointIndex_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { endPolylinePointIndex_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { int rawValue = input.readEnum(); speedTypeCase_ = 3; speedType_ = rawValue; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int speedTypeCase_ = 0; private java.lang.Object speedType_; public SpeedTypeCase getSpeedTypeCase() { return 
SpeedTypeCase.forNumber(speedTypeCase_); } public Builder clearSpeedType() { speedTypeCase_ = 0; speedType_ = null; onChanged(); return this; } private int bitField0_; private int startPolylinePointIndex_; /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @return Whether the startPolylinePointIndex field is set. */ @java.lang.Override public boolean hasStartPolylinePointIndex() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @return The startPolylinePointIndex. */ @java.lang.Override public int getStartPolylinePointIndex() { return startPolylinePointIndex_; } /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @param value The startPolylinePointIndex to set. * @return This builder for chaining. */ public Builder setStartPolylinePointIndex(int value) { startPolylinePointIndex_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The starting index of this interval in the polyline. * </pre> * * <code>optional int32 start_polyline_point_index = 1;</code> * * @return This builder for chaining. */ public Builder clearStartPolylinePointIndex() { bitField0_ = (bitField0_ & ~0x00000001); startPolylinePointIndex_ = 0; onChanged(); return this; } private int endPolylinePointIndex_; /** * * * <pre> * The ending index of this interval in the polyline. * </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @return Whether the endPolylinePointIndex field is set. */ @java.lang.Override public boolean hasEndPolylinePointIndex() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The ending index of this interval in the polyline. 
* </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @return The endPolylinePointIndex. */ @java.lang.Override public int getEndPolylinePointIndex() { return endPolylinePointIndex_; } /** * * * <pre> * The ending index of this interval in the polyline. * </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @param value The endPolylinePointIndex to set. * @return This builder for chaining. */ public Builder setEndPolylinePointIndex(int value) { endPolylinePointIndex_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The ending index of this interval in the polyline. * </pre> * * <code>optional int32 end_polyline_point_index = 2;</code> * * @return This builder for chaining. */ public Builder clearEndPolylinePointIndex() { bitField0_ = (bitField0_ & ~0x00000002); endPolylinePointIndex_ = 0; onChanged(); return this; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return Whether the speed field is set. */ @java.lang.Override public boolean hasSpeed() { return speedTypeCase_ == 3; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return The enum numeric value on the wire for speed. */ @java.lang.Override public int getSpeedValue() { if (speedTypeCase_ == 3) { return ((java.lang.Integer) speedType_).intValue(); } return 0; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @param value The enum numeric value on the wire for speed to set. * @return This builder for chaining. */ public Builder setSpeedValue(int value) { speedTypeCase_ = 3; speedType_ = value; onChanged(); return this; } /** * * * <pre> * Traffic speed in this interval. 
* </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return The speed. */ @java.lang.Override public com.google.maps.routing.v2.SpeedReadingInterval.Speed getSpeed() { if (speedTypeCase_ == 3) { com.google.maps.routing.v2.SpeedReadingInterval.Speed result = com.google.maps.routing.v2.SpeedReadingInterval.Speed.forNumber( (java.lang.Integer) speedType_); return result == null ? com.google.maps.routing.v2.SpeedReadingInterval.Speed.UNRECOGNIZED : result; } return com.google.maps.routing.v2.SpeedReadingInterval.Speed.SPEED_UNSPECIFIED; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @param value The speed to set. * @return This builder for chaining. */ public Builder setSpeed(com.google.maps.routing.v2.SpeedReadingInterval.Speed value) { if (value == null) { throw new NullPointerException(); } speedTypeCase_ = 3; speedType_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Traffic speed in this interval. * </pre> * * <code>.google.maps.routing.v2.SpeedReadingInterval.Speed speed = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearSpeed() { if (speedTypeCase_ == 3) { speedTypeCase_ = 0; speedType_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.maps.routing.v2.SpeedReadingInterval) } // @@protoc_insertion_point(class_scope:google.maps.routing.v2.SpeedReadingInterval) private static final com.google.maps.routing.v2.SpeedReadingInterval DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.maps.routing.v2.SpeedReadingInterval(); } public static com.google.maps.routing.v2.SpeedReadingInterval getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SpeedReadingInterval> PARSER = new com.google.protobuf.AbstractParser<SpeedReadingInterval>() { @java.lang.Override public SpeedReadingInterval parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SpeedReadingInterval> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<SpeedReadingInterval> getParserForType() { return PARSER; } @java.lang.Override public com.google.maps.routing.v2.SpeedReadingInterval getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
34,982
java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/SearchResult.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataform/v1beta1/dataform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataform.v1beta1; /** * * * <pre> * Client-facing representation of a search result entry. * </pre> * * Protobuf type {@code google.cloud.dataform.v1beta1.SearchResult} */ public final class SearchResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.SearchResult) SearchResultOrBuilder { private static final long serialVersionUID = 0L; // Use SearchResult.newBuilder() to construct. 
private SearchResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchResult() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchResult(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_SearchResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_SearchResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1beta1.SearchResult.class, com.google.cloud.dataform.v1beta1.SearchResult.Builder.class); } private int entryCase_ = 0; @SuppressWarnings("serial") private java.lang.Object entry_; public enum EntryCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { FILE(1), DIRECTORY(2), ENTRY_NOT_SET(0); private final int value; private EntryCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static EntryCase valueOf(int value) { return forNumber(value); } public static EntryCase forNumber(int value) { switch (value) { case 1: return FILE; case 2: return DIRECTORY; case 0: return ENTRY_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public EntryCase getEntryCase() { return EntryCase.forNumber(entryCase_); } public static final int FILE_FIELD_NUMBER = 1; /** * * * <pre> * Details when search result is a file. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> * * @return Whether the file field is set. */ @java.lang.Override public boolean hasFile() { return entryCase_ == 1; } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> * * @return The file. */ @java.lang.Override public com.google.cloud.dataform.v1beta1.FileSearchResult getFile() { if (entryCase_ == 1) { return (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_; } return com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ @java.lang.Override public com.google.cloud.dataform.v1beta1.FileSearchResultOrBuilder getFileOrBuilder() { if (entryCase_ == 1) { return (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_; } return com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } public static final int DIRECTORY_FIELD_NUMBER = 2; /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> * * @return Whether the directory field is set. */ @java.lang.Override public boolean hasDirectory() { return entryCase_ == 2; } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> * * @return The directory. */ @java.lang.Override public com.google.cloud.dataform.v1beta1.DirectorySearchResult getDirectory() { if (entryCase_ == 2) { return (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_; } return com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } /** * * * <pre> * Details when search result is a directory. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ @java.lang.Override public com.google.cloud.dataform.v1beta1.DirectorySearchResultOrBuilder getDirectoryOrBuilder() { if (entryCase_ == 2) { return (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_; } return com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (entryCase_ == 1) { output.writeMessage(1, (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_); } if (entryCase_ == 2) { output.writeMessage(2, (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (entryCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_); } if (entryCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataform.v1beta1.SearchResult)) { return super.equals(obj); } com.google.cloud.dataform.v1beta1.SearchResult other = (com.google.cloud.dataform.v1beta1.SearchResult) obj; if (!getEntryCase().equals(other.getEntryCase())) return false; switch (entryCase_) { case 1: if 
(!getFile().equals(other.getFile())) return false; break; case 2: if (!getDirectory().equals(other.getDirectory())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (entryCase_) { case 1: hash = (37 * hash) + FILE_FIELD_NUMBER; hash = (53 * hash) + getFile().hashCode(); break; case 2: hash = (37 * hash) + DIRECTORY_FIELD_NUMBER; hash = (53 * hash) + getDirectory().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.SearchResult parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.SearchResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.SearchResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataform.v1beta1.SearchResult prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Client-facing representation of a search result entry. * </pre> * * Protobuf type {@code google.cloud.dataform.v1beta1.SearchResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.SearchResult) com.google.cloud.dataform.v1beta1.SearchResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_SearchResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_SearchResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1beta1.SearchResult.class, com.google.cloud.dataform.v1beta1.SearchResult.Builder.class); } // Construct using com.google.cloud.dataform.v1beta1.SearchResult.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (fileBuilder_ != null) { fileBuilder_.clear(); } if (directoryBuilder_ != null) { directoryBuilder_.clear(); } entryCase_ = 0; entry_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataform.v1beta1.DataformProto 
.internal_static_google_cloud_dataform_v1beta1_SearchResult_descriptor; } @java.lang.Override public com.google.cloud.dataform.v1beta1.SearchResult getDefaultInstanceForType() { return com.google.cloud.dataform.v1beta1.SearchResult.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataform.v1beta1.SearchResult build() { com.google.cloud.dataform.v1beta1.SearchResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataform.v1beta1.SearchResult buildPartial() { com.google.cloud.dataform.v1beta1.SearchResult result = new com.google.cloud.dataform.v1beta1.SearchResult(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataform.v1beta1.SearchResult result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.cloud.dataform.v1beta1.SearchResult result) { result.entryCase_ = entryCase_; result.entry_ = this.entry_; if (entryCase_ == 1 && fileBuilder_ != null) { result.entry_ = fileBuilder_.build(); } if (entryCase_ == 2 && directoryBuilder_ != null) { result.entry_ = directoryBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataform.v1beta1.SearchResult) { return mergeFrom((com.google.cloud.dataform.v1beta1.SearchResult) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataform.v1beta1.SearchResult other) { if (other == com.google.cloud.dataform.v1beta1.SearchResult.getDefaultInstance()) return this; switch (other.getEntryCase()) { case FILE: { mergeFile(other.getFile()); break; } case DIRECTORY: { mergeDirectory(other.getDirectory()); break; } case ENTRY_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getFileFieldBuilder().getBuilder(), extensionRegistry); entryCase_ = 1; break; } // case 10 case 18: { input.readMessage(getDirectoryFieldBuilder().getBuilder(), extensionRegistry); entryCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int entryCase_ = 0; private java.lang.Object entry_; public EntryCase getEntryCase() { 
return EntryCase.forNumber(entryCase_); } public Builder clearEntry() { entryCase_ = 0; entry_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.FileSearchResult, com.google.cloud.dataform.v1beta1.FileSearchResult.Builder, com.google.cloud.dataform.v1beta1.FileSearchResultOrBuilder> fileBuilder_; /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> * * @return Whether the file field is set. */ @java.lang.Override public boolean hasFile() { return entryCase_ == 1; } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> * * @return The file. */ @java.lang.Override public com.google.cloud.dataform.v1beta1.FileSearchResult getFile() { if (fileBuilder_ == null) { if (entryCase_ == 1) { return (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_; } return com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } else { if (entryCase_ == 1) { return fileBuilder_.getMessage(); } return com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ public Builder setFile(com.google.cloud.dataform.v1beta1.FileSearchResult value) { if (fileBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entry_ = value; onChanged(); } else { fileBuilder_.setMessage(value); } entryCase_ = 1; return this; } /** * * * <pre> * Details when search result is a file. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ public Builder setFile( com.google.cloud.dataform.v1beta1.FileSearchResult.Builder builderForValue) { if (fileBuilder_ == null) { entry_ = builderForValue.build(); onChanged(); } else { fileBuilder_.setMessage(builderForValue.build()); } entryCase_ = 1; return this; } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ public Builder mergeFile(com.google.cloud.dataform.v1beta1.FileSearchResult value) { if (fileBuilder_ == null) { if (entryCase_ == 1 && entry_ != com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance()) { entry_ = com.google.cloud.dataform.v1beta1.FileSearchResult.newBuilder( (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_) .mergeFrom(value) .buildPartial(); } else { entry_ = value; } onChanged(); } else { if (entryCase_ == 1) { fileBuilder_.mergeFrom(value); } else { fileBuilder_.setMessage(value); } } entryCase_ = 1; return this; } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ public Builder clearFile() { if (fileBuilder_ == null) { if (entryCase_ == 1) { entryCase_ = 0; entry_ = null; onChanged(); } } else { if (entryCase_ == 1) { entryCase_ = 0; entry_ = null; } fileBuilder_.clear(); } return this; } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ public com.google.cloud.dataform.v1beta1.FileSearchResult.Builder getFileBuilder() { return getFileFieldBuilder().getBuilder(); } /** * * * <pre> * Details when search result is a file. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ @java.lang.Override public com.google.cloud.dataform.v1beta1.FileSearchResultOrBuilder getFileOrBuilder() { if ((entryCase_ == 1) && (fileBuilder_ != null)) { return fileBuilder_.getMessageOrBuilder(); } else { if (entryCase_ == 1) { return (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_; } return com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } } /** * * * <pre> * Details when search result is a file. * </pre> * * <code>.google.cloud.dataform.v1beta1.FileSearchResult file = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.FileSearchResult, com.google.cloud.dataform.v1beta1.FileSearchResult.Builder, com.google.cloud.dataform.v1beta1.FileSearchResultOrBuilder> getFileFieldBuilder() { if (fileBuilder_ == null) { if (!(entryCase_ == 1)) { entry_ = com.google.cloud.dataform.v1beta1.FileSearchResult.getDefaultInstance(); } fileBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.FileSearchResult, com.google.cloud.dataform.v1beta1.FileSearchResult.Builder, com.google.cloud.dataform.v1beta1.FileSearchResultOrBuilder>( (com.google.cloud.dataform.v1beta1.FileSearchResult) entry_, getParentForChildren(), isClean()); entry_ = null; } entryCase_ = 1; onChanged(); return fileBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.DirectorySearchResult, com.google.cloud.dataform.v1beta1.DirectorySearchResult.Builder, com.google.cloud.dataform.v1beta1.DirectorySearchResultOrBuilder> directoryBuilder_; /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> * * @return Whether the directory field is set. 
*/ @java.lang.Override public boolean hasDirectory() { return entryCase_ == 2; } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> * * @return The directory. */ @java.lang.Override public com.google.cloud.dataform.v1beta1.DirectorySearchResult getDirectory() { if (directoryBuilder_ == null) { if (entryCase_ == 2) { return (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_; } return com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } else { if (entryCase_ == 2) { return directoryBuilder_.getMessage(); } return com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ public Builder setDirectory(com.google.cloud.dataform.v1beta1.DirectorySearchResult value) { if (directoryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entry_ = value; onChanged(); } else { directoryBuilder_.setMessage(value); } entryCase_ = 2; return this; } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ public Builder setDirectory( com.google.cloud.dataform.v1beta1.DirectorySearchResult.Builder builderForValue) { if (directoryBuilder_ == null) { entry_ = builderForValue.build(); onChanged(); } else { directoryBuilder_.setMessage(builderForValue.build()); } entryCase_ = 2; return this; } /** * * * <pre> * Details when search result is a directory. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ public Builder mergeDirectory(com.google.cloud.dataform.v1beta1.DirectorySearchResult value) { if (directoryBuilder_ == null) { if (entryCase_ == 2 && entry_ != com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance()) { entry_ = com.google.cloud.dataform.v1beta1.DirectorySearchResult.newBuilder( (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_) .mergeFrom(value) .buildPartial(); } else { entry_ = value; } onChanged(); } else { if (entryCase_ == 2) { directoryBuilder_.mergeFrom(value); } else { directoryBuilder_.setMessage(value); } } entryCase_ = 2; return this; } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ public Builder clearDirectory() { if (directoryBuilder_ == null) { if (entryCase_ == 2) { entryCase_ = 0; entry_ = null; onChanged(); } } else { if (entryCase_ == 2) { entryCase_ = 0; entry_ = null; } directoryBuilder_.clear(); } return this; } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ public com.google.cloud.dataform.v1beta1.DirectorySearchResult.Builder getDirectoryBuilder() { return getDirectoryFieldBuilder().getBuilder(); } /** * * * <pre> * Details when search result is a directory. 
* </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ @java.lang.Override public com.google.cloud.dataform.v1beta1.DirectorySearchResultOrBuilder getDirectoryOrBuilder() { if ((entryCase_ == 2) && (directoryBuilder_ != null)) { return directoryBuilder_.getMessageOrBuilder(); } else { if (entryCase_ == 2) { return (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_; } return com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } } /** * * * <pre> * Details when search result is a directory. * </pre> * * <code>.google.cloud.dataform.v1beta1.DirectorySearchResult directory = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.DirectorySearchResult, com.google.cloud.dataform.v1beta1.DirectorySearchResult.Builder, com.google.cloud.dataform.v1beta1.DirectorySearchResultOrBuilder> getDirectoryFieldBuilder() { if (directoryBuilder_ == null) { if (!(entryCase_ == 2)) { entry_ = com.google.cloud.dataform.v1beta1.DirectorySearchResult.getDefaultInstance(); } directoryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.DirectorySearchResult, com.google.cloud.dataform.v1beta1.DirectorySearchResult.Builder, com.google.cloud.dataform.v1beta1.DirectorySearchResultOrBuilder>( (com.google.cloud.dataform.v1beta1.DirectorySearchResult) entry_, getParentForChildren(), isClean()); entry_ = null; } entryCase_ = 2; onChanged(); return directoryBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.SearchResult) } // 
@@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.SearchResult) private static final com.google.cloud.dataform.v1beta1.SearchResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.SearchResult(); } public static com.google.cloud.dataform.v1beta1.SearchResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchResult> PARSER = new com.google.protobuf.AbstractParser<SearchResult>() { @java.lang.Override public SearchResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataform.v1beta1.SearchResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,016
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/RecommendSpecRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/model_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [ModelService.RecommendSpec][google.cloud.aiplatform.v1beta1.ModelService.RecommendSpec]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.RecommendSpecRequest} */ public final class RecommendSpecRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.RecommendSpecRequest) RecommendSpecRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RecommendSpecRequest.newBuilder() to construct. 
private RecommendSpecRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RecommendSpecRequest() { parent_ = ""; gcsUri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RecommendSpecRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1beta1_RecommendSpecRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1beta1_RecommendSpecRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.class, com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. 
* Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GCS_URI_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object gcsUri_ = ""; /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The gcsUri. */ @java.lang.Override public java.lang.String getGcsUri() { java.lang.Object ref = gcsUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gcsUri_ = s; return s; } } /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for gcsUri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getGcsUriBytes() { java.lang.Object ref = gcsUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gcsUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CHECK_MACHINE_AVAILABILITY_FIELD_NUMBER = 3; private boolean checkMachineAvailability_ = false; /** * * * <pre> * Optional. If true, check machine availability for the recommended regions. * Only return the machine spec in regions where the machine is available. * </pre> * * <code>bool check_machine_availability = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkMachineAvailability. */ @java.lang.Override public boolean getCheckMachineAvailability() { return checkMachineAvailability_; } public static final int CHECK_USER_QUOTA_FIELD_NUMBER = 4; private boolean checkUserQuota_ = false; /** * * * <pre> * Optional. If true, check user quota for the recommended regions. * Returns all the machine spec in regions they are available, and also the * user quota state for each machine type in each region. * </pre> * * <code>bool check_user_quota = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkUserQuota. 
*/ @java.lang.Override public boolean getCheckUserQuota() { return checkUserQuota_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, gcsUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, parent_); } if (checkMachineAvailability_ != false) { output.writeBool(3, checkMachineAvailability_); } if (checkUserQuota_ != false) { output.writeBool(4, checkUserQuota_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, gcsUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, parent_); } if (checkMachineAvailability_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, checkMachineAvailability_); } if (checkUserQuota_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, checkUserQuota_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest other = 
(com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getGcsUri().equals(other.getGcsUri())) return false; if (getCheckMachineAvailability() != other.getCheckMachineAvailability()) return false; if (getCheckUserQuota() != other.getCheckUserQuota()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + GCS_URI_FIELD_NUMBER; hash = (53 * hash) + getGcsUri().hashCode(); hash = (37 * hash) + CHECK_MACHINE_AVAILABILITY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getCheckMachineAvailability()); hash = (37 * hash) + CHECK_USER_QUOTA_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getCheckUserQuota()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ModelService.RecommendSpec][google.cloud.aiplatform.v1beta1.ModelService.RecommendSpec]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.RecommendSpecRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.RecommendSpecRequest) com.google.cloud.aiplatform.v1beta1.RecommendSpecRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1beta1_RecommendSpecRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1beta1_RecommendSpecRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.class, com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; gcsUri_ = ""; checkMachineAvailability_ = false; checkUserQuota_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1beta1_RecommendSpecRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest build() { 
com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest result = new com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.gcsUri_ = gcsUri_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.checkMachineAvailability_ = checkMachineAvailability_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.checkUserQuota_ = checkUserQuota_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getGcsUri().isEmpty()) { gcsUri_ = other.gcsUri_; bitField0_ |= 0x00000002; onChanged(); } if (other.getCheckMachineAvailability() != false) { setCheckMachineAvailability(other.getCheckMachineAvailability()); } if (other.getCheckUserQuota() != false) { setCheckUserQuota(other.getCheckUserQuota()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { gcsUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 10 case 18: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 18 case 24: { checkMachineAvailability_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { checkUserQuota_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { 
onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location from which to recommend specs. * The users must have permission to make a call in the project. * Format: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object gcsUri_ = ""; /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The gcsUri. 
*/ public java.lang.String getGcsUri() { java.lang.Object ref = gcsUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gcsUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for gcsUri. */ public com.google.protobuf.ByteString getGcsUriBytes() { java.lang.Object ref = gcsUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gcsUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The gcsUri to set. * @return This builder for chaining. */ public Builder setGcsUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } gcsUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). * </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearGcsUri() { gcsUri_ = getDefaultInstance().getGcsUri(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The Google Cloud Storage URI of the custom model, storing weights * and config files (which can be used to infer the base model). 
* </pre> * * <code>string gcs_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for gcsUri to set. * @return This builder for chaining. */ public Builder setGcsUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); gcsUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean checkMachineAvailability_; /** * * * <pre> * Optional. If true, check machine availability for the recommended regions. * Only return the machine spec in regions where the machine is available. * </pre> * * <code>bool check_machine_availability = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkMachineAvailability. */ @java.lang.Override public boolean getCheckMachineAvailability() { return checkMachineAvailability_; } /** * * * <pre> * Optional. If true, check machine availability for the recommended regions. * Only return the machine spec in regions where the machine is available. * </pre> * * <code>bool check_machine_availability = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The checkMachineAvailability to set. * @return This builder for chaining. */ public Builder setCheckMachineAvailability(boolean value) { checkMachineAvailability_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If true, check machine availability for the recommended regions. * Only return the machine spec in regions where the machine is available. * </pre> * * <code>bool check_machine_availability = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearCheckMachineAvailability() { bitField0_ = (bitField0_ & ~0x00000004); checkMachineAvailability_ = false; onChanged(); return this; } private boolean checkUserQuota_; /** * * * <pre> * Optional. If true, check user quota for the recommended regions. 
* Returns all the machine spec in regions they are available, and also the * user quota state for each machine type in each region. * </pre> * * <code>bool check_user_quota = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The checkUserQuota. */ @java.lang.Override public boolean getCheckUserQuota() { return checkUserQuota_; } /** * * * <pre> * Optional. If true, check user quota for the recommended regions. * Returns all the machine spec in regions they are available, and also the * user quota state for each machine type in each region. * </pre> * * <code>bool check_user_quota = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The checkUserQuota to set. * @return This builder for chaining. */ public Builder setCheckUserQuota(boolean value) { checkUserQuota_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. If true, check user quota for the recommended regions. * Returns all the machine spec in regions they are available, and also the * user quota state for each machine type in each region. * </pre> * * <code>bool check_user_quota = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearCheckUserQuota() { bitField0_ = (bitField0_ & ~0x00000008); checkUserQuota_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.RecommendSpecRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.RecommendSpecRequest) private static final com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest(); } public static com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RecommendSpecRequest> PARSER = new com.google.protobuf.AbstractParser<RecommendSpecRequest>() { @java.lang.Override public RecommendSpecRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RecommendSpecRequest> parser() { return PARSER; } @java.lang.Override 
public com.google.protobuf.Parser<RecommendSpecRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.RecommendSpecRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/rya
35,382
extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/query/FluoQueryMetadataDAO.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.indexing.pcj.fluo.app.query;

import static java.util.Objects.requireNonNull;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.ObjectOutputStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.serialization.ValidatingObjectInputStream;
import org.apache.fluo.api.client.SnapshotBase;
import org.apache.fluo.api.client.TransactionBase;
import org.apache.fluo.api.data.Bytes;
import org.apache.fluo.api.data.Column;
import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
import org.apache.rya.api.client.CreatePCJ.QueryType;
import org.apache.rya.api.function.aggregation.AggregationElement;
import org.apache.rya.api.function.aggregation.AggregationType;
import org.apache.rya.indexing.pcj.fluo.app.ConstructGraph;
import org.apache.rya.indexing.pcj.fluo.app.ConstructGraphSerializer;
import org.apache.rya.indexing.pcj.fluo.app.IncrementalUpdateConstants;
import org.apache.rya.indexing.pcj.fluo.app.NodeType;
import org.apache.rya.indexing.pcj.fluo.app.query.JoinMetadata.JoinType;
import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder;

import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.Sets;

import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
import edu.umd.cs.findbugs.annotations.NonNull;

/**
 * Reads and writes {@link FluoQuery} instances and their components to/from
 * a Fluo table. Each metadata node is stored in its own row, keyed by the
 * node's ID, with one Fluo column per metadata attribute.
 * <p>
 * Note, this class should be implemented in a thread-safe manner due to current usage.
 */
@DefaultAnnotation(NonNull.class)
public class FluoQueryMetadataDAO {

    /**
     * Write an instance of {@link QueryMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Query node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final QueryMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final Joiner joiner = Joiner.on(IncrementalUpdateConstants.VAR_DELIM);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.QUERY_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.QUERY_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.QUERY_SPARQL, metadata.getSparql() );
        tx.set(rowId, FluoQueryColumns.QUERY_EXPORT_STRATEGIES, joiner.join(metadata.getExportStrategies()));
        tx.set(rowId, FluoQueryColumns.QUERY_TYPE, metadata.getQueryType().toString());
        tx.set(rowId, FluoQueryColumns.QUERY_CHILD_NODE_ID, metadata.getChildNodeId() );
    }

    /**
     * Read an instance of {@link QueryMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Query node that will be read. (not null)
     * @return The {@link QueryMetadata} that was read from the table.
     */
    public QueryMetadata readQueryMetadata(final SnapshotBase sx, final String nodeId) {
        return readQueryMetadataBuilder(sx, nodeId).build();
    }

    private QueryMetadata.Builder readQueryMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.QUERY_VARIABLE_ORDER,
                FluoQueryColumns.QUERY_SPARQL,
                FluoQueryColumns.QUERY_TYPE,
                FluoQueryColumns.QUERY_EXPORT_STRATEGIES,
                FluoQueryColumns.QUERY_CHILD_NODE_ID);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.QUERY_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);

        final String sparql = values.get(FluoQueryColumns.QUERY_SPARQL);
        final String childNodeId = values.get(FluoQueryColumns.QUERY_CHILD_NODE_ID);
        final String queryType = values.get(FluoQueryColumns.QUERY_TYPE);

        // The export strategies are stored as a single delimited string; an
        // empty string means the query has no export strategies.
        final String[] exportStrategies = values.get(FluoQueryColumns.QUERY_EXPORT_STRATEGIES).split(IncrementalUpdateConstants.VAR_DELIM);

        final Set<ExportStrategy> strategies = new HashSet<>();
        for (final String strategy : exportStrategies) {
            if (!strategy.isEmpty()) {
                strategies.add(ExportStrategy.valueOf(strategy));
            }
        }

        return QueryMetadata.builder(nodeId)
                .setVarOrder( varOrder )
                .setSparql( sparql )
                .setExportStrategies(strategies)
                .setQueryType(QueryType.valueOf(queryType))
                .setChildNodeId( childNodeId );
    }

    /**
     * Write an instance of {@link ProjectionMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Projection node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final ProjectionMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.PROJECTION_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.PROJECTION_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.PROJECTION_PROJECTED_VARS, metadata.getProjectedVars().toString());
        tx.set(rowId, FluoQueryColumns.PROJECTION_PARENT_NODE_ID, metadata.getParentNodeId());
        tx.set(rowId, FluoQueryColumns.PROJECTION_CHILD_NODE_ID, metadata.getChildNodeId() );
    }

    /**
     * Read an instance of {@link ProjectionMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Projection node that will be read. (not null)
     * @return The {@link ProjectionMetadata} that was read from the table.
     */
    public ProjectionMetadata readProjectionMetadata(final SnapshotBase sx, final String nodeId) {
        return readProjectionMetadataBuilder(sx, nodeId).build();
    }

    private ProjectionMetadata.Builder readProjectionMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.PROJECTION_VARIABLE_ORDER,
                FluoQueryColumns.PROJECTION_PROJECTED_VARS,
                FluoQueryColumns.PROJECTION_PARENT_NODE_ID,
                FluoQueryColumns.PROJECTION_CHILD_NODE_ID);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.PROJECTION_VARIABLE_ORDER);
        final String projectedVarString = values.get(FluoQueryColumns.PROJECTION_PROJECTED_VARS);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final VariableOrder projectedVars = new VariableOrder(projectedVarString);
        final String childNodeId = values.get(FluoQueryColumns.PROJECTION_CHILD_NODE_ID);
        final String parentNodeId = values.get(FluoQueryColumns.PROJECTION_PARENT_NODE_ID);

        return ProjectionMetadata.builder(nodeId)
                .setVarOrder( varOrder )
                .setProjectedVars(projectedVars)
                .setParentNodeId(parentNodeId)
                .setChildNodeId( childNodeId );
    }

    /**
     * Write an instance of {@link ConstructQueryMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Construct Query node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final ConstructQueryMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.CONSTRUCT_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.CONSTRUCT_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.CONSTRUCT_CHILD_NODE_ID, metadata.getChildNodeId() );
        tx.set(rowId, FluoQueryColumns.CONSTRUCT_PARENT_NODE_ID, metadata.getParentNodeId() );
        tx.set(rowId, FluoQueryColumns.CONSTRUCT_GRAPH, ConstructGraphSerializer.toConstructString(metadata.getConstructGraph()));
    }

    /**
     * Read an instance of {@link ConstructQueryMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Construct Query node that will be read. (not null)
     * @return The {@link ConstructQueryMetadata} that was read from the table.
     */
    public ConstructQueryMetadata readConstructQueryMetadata(final SnapshotBase sx, final String nodeId) {
        return readConstructQueryMetadataBuilder(sx, nodeId).build();
    }

    private ConstructQueryMetadata.Builder readConstructQueryMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.CONSTRUCT_GRAPH,
                FluoQueryColumns.CONSTRUCT_CHILD_NODE_ID,
                FluoQueryColumns.CONSTRUCT_PARENT_NODE_ID,
                FluoQueryColumns.CONSTRUCT_VARIABLE_ORDER);

        final String graphString = values.get(FluoQueryColumns.CONSTRUCT_GRAPH);
        final ConstructGraph graph = ConstructGraphSerializer.toConstructGraph(graphString);
        final String childNodeId = values.get(FluoQueryColumns.CONSTRUCT_CHILD_NODE_ID);
        final String parentNodeId = values.get(FluoQueryColumns.CONSTRUCT_PARENT_NODE_ID);
        final String varOrderString = values.get(FluoQueryColumns.CONSTRUCT_VARIABLE_ORDER);

        return ConstructQueryMetadata.builder()
                .setNodeId(nodeId)
                .setParentNodeId(parentNodeId)
                .setConstructGraph(graph)
                .setVarOrder(new VariableOrder(varOrderString))
                .setChildNodeId(childNodeId);
    }

    /**
     * Write an instance of {@link FilterMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Filter node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final FilterMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.FILTER_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.FILTER_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.FILTER_SPARQL, metadata.getFilterSparql() );
        tx.set(rowId, FluoQueryColumns.FILTER_PARENT_NODE_ID, metadata.getParentNodeId() );
        tx.set(rowId, FluoQueryColumns.FILTER_CHILD_NODE_ID, metadata.getChildNodeId() );
    }

    /**
     * Read an instance of {@link FilterMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Filter node that will be read. (not null)
     * @return The {@link FilterMetadata} that was read from the table.
     */
    public FilterMetadata readFilterMetadata(final SnapshotBase sx, final String nodeId) {
        return readFilterMetadataBuilder(sx, nodeId).build();
    }

    private FilterMetadata.Builder readFilterMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.FILTER_VARIABLE_ORDER,
                FluoQueryColumns.FILTER_SPARQL,
                FluoQueryColumns.FILTER_PARENT_NODE_ID,
                FluoQueryColumns.FILTER_CHILD_NODE_ID);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.FILTER_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final String originalSparql = values.get(FluoQueryColumns.FILTER_SPARQL);
        final String parentNodeId = values.get(FluoQueryColumns.FILTER_PARENT_NODE_ID);
        final String childNodeId = values.get(FluoQueryColumns.FILTER_CHILD_NODE_ID);

        return FilterMetadata.builder(nodeId).setVarOrder(varOrder).setFilterSparql(originalSparql)
                .setParentNodeId(parentNodeId).setChildNodeId(childNodeId);
    }

    /**
     * Write an instance of {@link PeriodicQueryMetadata} to the Fluo table.
     *
     * @param tx
     *            - The transaction that will be used to commit the metadata.
     *            (not null)
     * @param metadata
     *            - The PeriodicBin node metadata that will be written to the
     *            table. (not null)
     */
    public void write(final TransactionBase tx, final PeriodicQueryMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_PARENT_NODE_ID, metadata.getParentNodeId());
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_CHILD_NODE_ID, metadata.getChildNodeId());
        // Numeric window parameters are stored as their decimal string forms.
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_PERIOD, Long.toString(metadata.getPeriod()));
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_WINDOWSIZE, Long.toString(metadata.getWindowSize()));
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_TIMEUNIT, metadata.getUnit().name());
        tx.set(rowId, FluoQueryColumns.PERIODIC_QUERY_TEMPORAL_VARIABLE, metadata.getTemporalVariable());
    }

    /**
     * Read an instance of {@link PeriodicQueryMetadata} from the Fluo table.
     *
     * @param sx
     *            - The snapshot that will be used to read the metadata. (not
     *            null)
     * @param nodeId
     *            - The nodeId of the PeriodicBin node that will be read. (not
     *            null)
     * @return The {@link PeriodicQueryMetadata} that was read from the table.
     */
    public PeriodicQueryMetadata readPeriodicQueryMetadata(final SnapshotBase sx, final String nodeId) {
        return readPeriodicQueryMetadataBuilder(sx, nodeId).build();
    }

    private PeriodicQueryMetadata.Builder readPeriodicQueryMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.PERIODIC_QUERY_VARIABLE_ORDER,
                FluoQueryColumns.PERIODIC_QUERY_PARENT_NODE_ID,
                FluoQueryColumns.PERIODIC_QUERY_CHILD_NODE_ID,
                FluoQueryColumns.PERIODIC_QUERY_PERIOD,
                FluoQueryColumns.PERIODIC_QUERY_WINDOWSIZE,
                FluoQueryColumns.PERIODIC_QUERY_TIMEUNIT,
                FluoQueryColumns.PERIODIC_QUERY_TEMPORAL_VARIABLE);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.PERIODIC_QUERY_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final String parentNodeId = values.get(FluoQueryColumns.PERIODIC_QUERY_PARENT_NODE_ID);
        final String childNodeId = values.get(FluoQueryColumns.PERIODIC_QUERY_CHILD_NODE_ID);
        final String temporalVariable = values.get(FluoQueryColumns.PERIODIC_QUERY_TEMPORAL_VARIABLE);
        final String period = values.get(FluoQueryColumns.PERIODIC_QUERY_PERIOD);
        final String window = values.get(FluoQueryColumns.PERIODIC_QUERY_WINDOWSIZE);
        final String timeUnit = values.get(FluoQueryColumns.PERIODIC_QUERY_TIMEUNIT);

        return PeriodicQueryMetadata.builder()
                .setNodeId(nodeId)
                .setVarOrder(varOrder)
                .setParentNodeId(parentNodeId)
                .setChildNodeId(childNodeId)
                .setWindowSize(Long.parseLong(window))
                .setPeriod(Long.parseLong(period))
                .setTemporalVariable(temporalVariable)
                .setUnit(TimeUnit.valueOf(timeUnit));
    }

    /**
     * Write an instance of {@link JoinMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Join node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final JoinMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.JOIN_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.JOIN_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.JOIN_TYPE, metadata.getJoinType().toString() );
        tx.set(rowId, FluoQueryColumns.JOIN_PARENT_NODE_ID, metadata.getParentNodeId() );
        tx.set(rowId, FluoQueryColumns.JOIN_LEFT_CHILD_NODE_ID, metadata.getLeftChildNodeId() );
        tx.set(rowId, FluoQueryColumns.JOIN_BATCH_SIZE, Integer.toString(metadata.getJoinBatchSize()));
        tx.set(rowId, FluoQueryColumns.JOIN_RIGHT_CHILD_NODE_ID, metadata.getRightChildNodeId() );
    }

    /**
     * Read an instance of {@link JoinMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Join node that will be read. (not null)
     * @return The {@link JoinMetadata} that was read from the table.
     */
    public JoinMetadata readJoinMetadata(final SnapshotBase sx, final String nodeId) {
        return readJoinMetadataBuilder(sx, nodeId).build();
    }

    private JoinMetadata.Builder readJoinMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.JOIN_VARIABLE_ORDER,
                FluoQueryColumns.JOIN_TYPE,
                FluoQueryColumns.JOIN_PARENT_NODE_ID,
                FluoQueryColumns.JOIN_LEFT_CHILD_NODE_ID,
                FluoQueryColumns.JOIN_BATCH_SIZE,
                FluoQueryColumns.JOIN_RIGHT_CHILD_NODE_ID);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.JOIN_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final String joinTypeString = values.get(FluoQueryColumns.JOIN_TYPE);
        final JoinType joinType = JoinType.valueOf(joinTypeString);
        final String parentNodeId = values.get(FluoQueryColumns.JOIN_PARENT_NODE_ID);
        final String leftChildNodeId = values.get(FluoQueryColumns.JOIN_LEFT_CHILD_NODE_ID);
        final int joinBatchSize = Integer.parseInt(values.get(FluoQueryColumns.JOIN_BATCH_SIZE));
        final String rightChildNodeId = values.get(FluoQueryColumns.JOIN_RIGHT_CHILD_NODE_ID);

        return JoinMetadata.builder(nodeId)
                .setVarOrder(varOrder)
                .setJoinType(joinType)
                .setParentNodeId(parentNodeId)
                .setJoinBatchSize(joinBatchSize)
                .setLeftChildNodeId(leftChildNodeId)
                .setRightChildNodeId(rightChildNodeId);
    }

    /**
     * Write an instance of {@link StatementPatternMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Statement Pattern node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final StatementPatternMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.STATEMENT_PATTERN_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.STATEMENT_PATTERN_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.STATEMENT_PATTERN_PATTERN, metadata.getStatementPattern() );
        tx.set(rowId, FluoQueryColumns.STATEMENT_PATTERN_PARENT_NODE_ID, metadata.getParentNodeId());
    }

    /**
     * Read an instance of {@link StatementPatternMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Statement Pattern node that will be read. (not null)
     * @return The {@link StatementPatternMetadata} that was read from the table.
     */
    public StatementPatternMetadata readStatementPatternMetadata(final SnapshotBase sx, final String nodeId) {
        return readStatementPatternMetadataBuilder(sx, nodeId).build();
    }

    private StatementPatternMetadata.Builder readStatementPatternMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.STATEMENT_PATTERN_VARIABLE_ORDER,
                FluoQueryColumns.STATEMENT_PATTERN_PATTERN,
                FluoQueryColumns.STATEMENT_PATTERN_PARENT_NODE_ID);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.STATEMENT_PATTERN_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final String pattern = values.get(FluoQueryColumns.STATEMENT_PATTERN_PATTERN);
        final String parentNodeId = values.get(FluoQueryColumns.STATEMENT_PATTERN_PARENT_NODE_ID);

        return StatementPatternMetadata.builder(nodeId).setVarOrder(varOrder).setStatementPattern(pattern).setParentNodeId(parentNodeId);
    }

    /**
     * Write an instance of {@link AggregationMetadata} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param metadata - The Aggregation node metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final AggregationMetadata metadata) {
        requireNonNull(tx);
        requireNonNull(metadata);

        final String rowId = metadata.getNodeId();
        tx.set(rowId, FluoQueryColumns.AGGREGATION_NODE_ID, rowId);
        tx.set(rowId, FluoQueryColumns.AGGREGATION_VARIABLE_ORDER, metadata.getVariableOrder().toString());
        tx.set(rowId, FluoQueryColumns.AGGREGATION_PARENT_NODE_ID, metadata.getParentNodeId());
        tx.set(rowId, FluoQueryColumns.AGGREGATION_CHILD_NODE_ID, metadata.getChildNodeId());

        // Store the Group By variable order.
        final VariableOrder groupByVars = metadata.getGroupByVariableOrder();
        final String groupByString = Joiner.on(";").join(groupByVars.getVariableOrders());
        tx.set(rowId, FluoQueryColumns.AGGREGATION_GROUP_BY_BINDING_NAMES, groupByString);

        // Serialize the collection of AggregationElements.
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try(final ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject( metadata.getAggregations() );
        } catch (final IOException e) {
            throw new RuntimeException("Problem encountered while writing AggregationMetadata to the Fluo table. Unable " +
                    "to serialize the AggregationElements to a byte[].", e);
        }
        tx.set(Bytes.of(rowId.getBytes(Charsets.UTF_8)), FluoQueryColumns.AGGREGATION_AGGREGATIONS, Bytes.of(baos.toByteArray()));
    }

    /**
     * Read an instance of {@link AggregationMetadata} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata. (not null)
     * @param nodeId - The nodeId of the Aggregation node that will be read. (not null)
     * @return The {@link AggregationMetadata} that was read from the table.
     */
    public AggregationMetadata readAggregationMetadata(final SnapshotBase sx, final String nodeId) {
        return readAggregationMetadataBuilder(sx, nodeId).build();
    }

    private AggregationMetadata.Builder readAggregationMetadataBuilder(final SnapshotBase sx, final String nodeId) {
        requireNonNull(sx);
        requireNonNull(nodeId);

        // Fetch the values from the Fluo table.
        final String rowId = nodeId;
        final Map<Column, String> values = sx.gets(rowId,
                FluoQueryColumns.AGGREGATION_VARIABLE_ORDER,
                FluoQueryColumns.AGGREGATION_PARENT_NODE_ID,
                FluoQueryColumns.AGGREGATION_CHILD_NODE_ID,
                FluoQueryColumns.AGGREGATION_GROUP_BY_BINDING_NAMES);

        // Return an object holding them.
        final String varOrderString = values.get(FluoQueryColumns.AGGREGATION_VARIABLE_ORDER);
        final VariableOrder varOrder = new VariableOrder(varOrderString);
        final String parentNodeId = values.get(FluoQueryColumns.AGGREGATION_PARENT_NODE_ID);
        final String childNodeId = values.get(FluoQueryColumns.AGGREGATION_CHILD_NODE_ID);

        // Read the Group By variable order if one was present.
        final String groupByString = values.get(FluoQueryColumns.AGGREGATION_GROUP_BY_BINDING_NAMES);
        final VariableOrder groupByVars = groupByString.isEmpty() ? new VariableOrder() : new VariableOrder( groupByString.split(";") );

        // Deserialize the collection of AggregationElements.
        final Bytes aggBytes = sx.get(Bytes.of(nodeId.getBytes(Charsets.UTF_8)), FluoQueryColumns.AGGREGATION_AGGREGATIONS);
        final Collection<AggregationElement> aggregations;
        try (final ValidatingObjectInputStream vois = new ValidatingObjectInputStream(aggBytes.toInputStream())
                //// this is how you find classes that you missed in the vois.accept() list, below.
                // { @Override protected void invalidClassNameFound(String className) throws java.io.InvalidClassException {
                // System.out.println("vois.accept(" + className + ".class, ");};};
                ) {
            // These classes are allowed to be deserialized. Others throw InvalidClassException.
            vois.accept(java.util.ArrayList.class, java.lang.Enum.class, AggregationElement.class, AggregationType.class);
            final Object object = vois.readObject();
            if (!(object instanceof Collection<?>)) {
                throw new InvalidClassException("Object read was not of type Collection. It was: " + object.getClass());
            }
            // Unchecked in the element type only; the accept() whitelist above
            // restricts which classes may be deserialized into the collection.
            @SuppressWarnings("unchecked")
            final Collection<AggregationElement> deserialized = (Collection<AggregationElement>) object;
            aggregations = deserialized;
        } catch (final IOException | ClassNotFoundException e) {
            throw new RuntimeException("Problem encountered while reading AggregationMetadata from the Fluo table. Unable " +
                    "to deserialize the AggregationElements from a byte[].", e);
        }

        final AggregationMetadata.Builder builder = AggregationMetadata.builder(nodeId)
                .setVarOrder(varOrder)
                .setParentNodeId(parentNodeId)
                .setChildNodeId(childNodeId)
                .setGroupByVariableOrder(groupByVars);
        for(final AggregationElement aggregation : aggregations) {
            builder.addAggregation(aggregation);
        }

        return builder;
    }

    /**
     * Write an instance of {@link FluoQuery} to the Fluo table.
     *
     * @param tx - The transaction that will be used to commit the metadata. (not null)
     * @param query - The query metadata that will be written to the table. (not null)
     */
    public void write(final TransactionBase tx, final FluoQuery query) {
        requireNonNull(tx);
        requireNonNull(query);

        // The results of the query are eventually exported to an instance
        // of Rya, so store the Rya ID for the PCJ.
        write(tx, query.getQueryMetadata());

        // Write the rest of the metadata objects.
        if (query.getQueryType() == QueryType.CONSTRUCT) {
            final ConstructQueryMetadata constructMetadata = query.getConstructQueryMetadata().get();
            write(tx, constructMetadata);
        }

        for(final ProjectionMetadata projection : query.getProjectionMetadata()) {
            write(tx, projection);
        }

        final Optional<PeriodicQueryMetadata> periodicMetadata = query.getPeriodicQueryMetadata();
        if(periodicMetadata.isPresent()) {
            write(tx, periodicMetadata.get());
        }

        for(final FilterMetadata filter : query.getFilterMetadata()) {
            write(tx, filter);
        }

        for(final JoinMetadata join : query.getJoinMetadata()) {
            write(tx, join);
        }

        // Collect the statement pattern node IDs so they can be registered in bulk.
        final Set<String> ids = new HashSet<>();
        for(final StatementPatternMetadata statementPattern : query.getStatementPatternMetadata()) {
            write(tx, statementPattern);
            ids.add(statementPattern.getNodeId());
        }
        StatementPatternIdManager.addStatementPatternIds(tx, Sets.newHashSet(ids));

        for(final AggregationMetadata aggregation : query.getAggregationMetadata()) {
            write(tx, aggregation);
        }
    }

    /**
     * Read an instance of {@link FluoQuery} from the Fluo table.
     *
     * @param sx - The snapshot that will be used to read the metadata from the Fluo table. (not null)
     * @param queryId - The ID of the query whose nodes will be read. (not null)
     * @return The {@link FluoQuery} that was read from the table.
     * @throws UnsupportedQueryException
     */
    public FluoQuery readFluoQuery(final SnapshotBase sx, final String queryId) throws UnsupportedQueryException {
        requireNonNull(sx);
        requireNonNull(queryId);

        final FluoQuery.Builder fluoQueryBuilder = FluoQuery.builder();
        addChildMetadata(sx, fluoQueryBuilder, queryId);
        return fluoQueryBuilder.build();
    }

    /**
     * Recursively reads the metadata node identified by {@code childNodeId} and
     * all of its descendants into {@code builder}, dispatching on the node type
     * that is encoded in the node ID.
     */
    private void addChildMetadata(final SnapshotBase sx, final FluoQuery.Builder builder, final String childNodeId) {
        requireNonNull(sx);
        requireNonNull(builder);
        requireNonNull(childNodeId);

        final NodeType childType = NodeType.fromNodeId(childNodeId).get();
        switch (childType) {
        case QUERY:
            // Add this node's metadata.
            final QueryMetadata.Builder queryBuilder = readQueryMetadataBuilder(sx, childNodeId);
            builder.setQueryMetadata(queryBuilder);

            // Add it's child's metadata.
            addChildMetadata(sx, builder, queryBuilder.build().getChildNodeId());
            break;

        case PROJECTION:
            //Add this node's metadata
            final ProjectionMetadata.Builder projectionBuilder = readProjectionMetadataBuilder(sx, childNodeId);
            builder.addProjectionBuilder(projectionBuilder);

            //Add it's child's metadata
            addChildMetadata(sx, builder, projectionBuilder.build().getChildNodeId());
            break;

        case CONSTRUCT:
            final ConstructQueryMetadata.Builder constructBuilder = readConstructQueryMetadataBuilder(sx, childNodeId);
            builder.setConstructQueryMetadata(constructBuilder);

            // Add it's child's metadata.
            addChildMetadata(sx, builder, constructBuilder.build().getChildNodeId());
            break;

        case PERIODIC_QUERY:
            // Add this node's metadata.
            final PeriodicQueryMetadata.Builder periodicQueryBuilder = readPeriodicQueryMetadataBuilder(sx, childNodeId);
            builder.addPeriodicQueryMetadata(periodicQueryBuilder);

            // Add it's child's metadata.
            addChildMetadata(sx, builder, periodicQueryBuilder.build().getChildNodeId());
            break;

        case AGGREGATION:
            // Add this node's metadata.
            final AggregationMetadata.Builder aggregationBuilder = readAggregationMetadataBuilder(sx, childNodeId);
            builder.addAggregateMetadata(aggregationBuilder);

            // Add it's child's metadata.
            addChildMetadata(sx, builder, aggregationBuilder.build().getChildNodeId());
            break;

        case JOIN:
            // Add this node's metadata.
            final JoinMetadata.Builder joinBuilder = readJoinMetadataBuilder(sx, childNodeId);
            builder.addJoinMetadata(joinBuilder);

            // Add it's children's metadata.
            final JoinMetadata joinMetadata = joinBuilder.build();
            addChildMetadata(sx, builder, joinMetadata.getLeftChildNodeId());
            addChildMetadata(sx, builder, joinMetadata.getRightChildNodeId());
            break;

        case FILTER:
            // Add this node's metadata.
            final FilterMetadata.Builder filterBuilder = readFilterMetadataBuilder(sx, childNodeId);
            builder.addFilterMetadata(filterBuilder);

            // Add it's child's metadata.
            addChildMetadata(sx, builder, filterBuilder.build().getChildNodeId());
            break;

        case STATEMENT_PATTERN:
            // Add this node's metadata. Statement patterns are leaves, so
            // there is no child to recurse into.
            final StatementPatternMetadata.Builder spBuilder = readStatementPatternMetadataBuilder(sx, childNodeId);
            builder.addStatementPatternBuilder(spBuilder);
            break;
        default:
            break;
        }
    }
}
googleapis/google-cloud-java
34,941
java-functions/proto-google-cloud-functions-v2/src/main/java/com/google/cloud/functions/v2/StorageSource.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/functions/v2/functions.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.functions.v2; /** * * * <pre> * Location of the source in an archive file in Google Cloud Storage. * </pre> * * Protobuf type {@code google.cloud.functions.v2.StorageSource} */ public final class StorageSource extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.functions.v2.StorageSource) StorageSourceOrBuilder { private static final long serialVersionUID = 0L; // Use StorageSource.newBuilder() to construct. 
private StorageSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private StorageSource() { bucket_ = ""; object_ = ""; sourceUploadUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new StorageSource(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2.FunctionsProto .internal_static_google_cloud_functions_v2_StorageSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2.FunctionsProto .internal_static_google_cloud_functions_v2_StorageSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2.StorageSource.class, com.google.cloud.functions.v2.StorageSource.Builder.class); } public static final int BUCKET_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object bucket_ = ""; /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bucket. */ @java.lang.Override public java.lang.String getBucket() { java.lang.Object ref = bucket_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bucket_ = s; return s; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bytes for bucket. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBucketBytes() { java.lang.Object ref = bucket_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bucket_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OBJECT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object object_ = ""; /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The object. */ @java.lang.Override public java.lang.String getObject() { java.lang.Object ref = object_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); object_ = s; return s; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The bytes for object. */ @java.lang.Override public com.google.protobuf.ByteString getObjectBytes() { java.lang.Object ref = object_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); object_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GENERATION_FIELD_NUMBER = 3; private long generation_ = 0L; /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return The generation. 
*/ @java.lang.Override public long getGeneration() { return generation_; } public static final int SOURCE_UPLOAD_URL_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object sourceUploadUrl_ = ""; /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The sourceUploadUrl. */ @java.lang.Override public java.lang.String getSourceUploadUrl() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceUploadUrl_ = s; return s; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The bytes for sourceUploadUrl. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSourceUploadUrlBytes() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceUploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bucket_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, object_); } if (generation_ != 0L) { output.writeInt64(3, generation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, sourceUploadUrl_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bucket_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, object_); } if (generation_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, generation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, sourceUploadUrl_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.functions.v2.StorageSource)) { return super.equals(obj); } com.google.cloud.functions.v2.StorageSource other = (com.google.cloud.functions.v2.StorageSource) obj; if (!getBucket().equals(other.getBucket())) return false; if (!getObject().equals(other.getObject())) return false; if (getGeneration() != other.getGeneration()) return false; if (!getSourceUploadUrl().equals(other.getSourceUploadUrl())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BUCKET_FIELD_NUMBER; hash = (53 * hash) + getBucket().hashCode(); hash = (37 * hash) + OBJECT_FIELD_NUMBER; hash = (53 * hash) + getObject().hashCode(); hash = (37 * hash) + GENERATION_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getGeneration()); hash = (37 * hash) + SOURCE_UPLOAD_URL_FIELD_NUMBER; hash = (53 * hash) + getSourceUploadUrl().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.functions.v2.StorageSource parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2.StorageSource parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2.StorageSource parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException 
{ return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2.StorageSource parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2.StorageSource parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2.StorageSource parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2.StorageSource parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2.StorageSource parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2.StorageSource parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.functions.v2.StorageSource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2.StorageSource parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, 
input); } public static com.google.cloud.functions.v2.StorageSource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.functions.v2.StorageSource prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Location of the source in an archive file in Google Cloud Storage. 
* </pre> * * Protobuf type {@code google.cloud.functions.v2.StorageSource} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2.StorageSource) com.google.cloud.functions.v2.StorageSourceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2.FunctionsProto .internal_static_google_cloud_functions_v2_StorageSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2.FunctionsProto .internal_static_google_cloud_functions_v2_StorageSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2.StorageSource.class, com.google.cloud.functions.v2.StorageSource.Builder.class); } // Construct using com.google.cloud.functions.v2.StorageSource.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; bucket_ = ""; object_ = ""; generation_ = 0L; sourceUploadUrl_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.functions.v2.FunctionsProto .internal_static_google_cloud_functions_v2_StorageSource_descriptor; } @java.lang.Override public com.google.cloud.functions.v2.StorageSource getDefaultInstanceForType() { return com.google.cloud.functions.v2.StorageSource.getDefaultInstance(); } @java.lang.Override public com.google.cloud.functions.v2.StorageSource build() { com.google.cloud.functions.v2.StorageSource result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.functions.v2.StorageSource 
buildPartial() { com.google.cloud.functions.v2.StorageSource result = new com.google.cloud.functions.v2.StorageSource(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.functions.v2.StorageSource result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.bucket_ = bucket_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.object_ = object_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.generation_ = generation_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.sourceUploadUrl_ = sourceUploadUrl_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.functions.v2.StorageSource) { return mergeFrom((com.google.cloud.functions.v2.StorageSource) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.functions.v2.StorageSource other) { if (other == com.google.cloud.functions.v2.StorageSource.getDefaultInstance()) return this; if (!other.getBucket().isEmpty()) 
{ bucket_ = other.bucket_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getObject().isEmpty()) { object_ = other.object_; bitField0_ |= 0x00000002; onChanged(); } if (other.getGeneration() != 0L) { setGeneration(other.getGeneration()); } if (!other.getSourceUploadUrl().isEmpty()) { sourceUploadUrl_ = other.sourceUploadUrl_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { bucket_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { object_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { generation_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { sourceUploadUrl_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object bucket_ = ""; /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bucket. 
*/ public java.lang.String getBucket() { java.lang.Object ref = bucket_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bucket_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bytes for bucket. */ public com.google.protobuf.ByteString getBucketBytes() { java.lang.Object ref = bucket_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bucket_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @param value The bucket to set. * @return This builder for chaining. */ public Builder setBucket(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bucket_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return This builder for chaining. */ public Builder clearBucket() { bucket_ = getDefaultInstance().getBucket(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @param value The bytes for bucket to set. 
* @return This builder for chaining. */ public Builder setBucketBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bucket_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object object_ = ""; /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The object. */ public java.lang.String getObject() { java.lang.Object ref = object_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); object_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The bytes for object. */ public com.google.protobuf.ByteString getObjectBytes() { java.lang.Object ref = object_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); object_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @param value The object to set. * @return This builder for chaining. */ public Builder setObject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } object_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. 
* </pre> * * <code>string object = 2;</code> * * @return This builder for chaining. */ public Builder clearObject() { object_ = getDefaultInstance().getObject(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @param value The bytes for object to set. * @return This builder for chaining. */ public Builder setObjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); object_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long generation_; /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return The generation. */ @java.lang.Override public long getGeneration() { return generation_; } /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @param value The generation to set. * @return This builder for chaining. */ public Builder setGeneration(long value) { generation_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearGeneration() { bitField0_ = (bitField0_ & ~0x00000004); generation_ = 0L; onChanged(); return this; } private java.lang.Object sourceUploadUrl_ = ""; /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The sourceUploadUrl. */ public java.lang.String getSourceUploadUrl() { java.lang.Object ref = sourceUploadUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceUploadUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The bytes for sourceUploadUrl. */ public com.google.protobuf.ByteString getSourceUploadUrlBytes() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceUploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @param value The sourceUploadUrl to set. * @return This builder for chaining. 
*/ public Builder setSourceUploadUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } sourceUploadUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return This builder for chaining. */ public Builder clearSourceUploadUrl() { sourceUploadUrl_ = getDefaultInstance().getSourceUploadUrl(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @param value The bytes for sourceUploadUrl to set. * @return This builder for chaining. */ public Builder setSourceUploadUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sourceUploadUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2.StorageSource) } // @@protoc_insertion_point(class_scope:google.cloud.functions.v2.StorageSource) private static final com.google.cloud.functions.v2.StorageSource DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.functions.v2.StorageSource(); } public static com.google.cloud.functions.v2.StorageSource getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<StorageSource> PARSER = new com.google.protobuf.AbstractParser<StorageSource>() { @java.lang.Override public StorageSource parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<StorageSource> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<StorageSource> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.functions.v2.StorageSource getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,044
java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/UpdateWorkflowConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataform/v1beta1/dataform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataform.v1beta1; /** * * * <pre> * `UpdateWorkflowConfig` request message. * </pre> * * Protobuf type {@code google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest} */ public final class UpdateWorkflowConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) UpdateWorkflowConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateWorkflowConfigRequest.newBuilder() to construct. 
private UpdateWorkflowConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateWorkflowConfigRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateWorkflowConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_UpdateWorkflowConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_UpdateWorkflowConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.class, com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int WORKFLOW_CONFIG_FIELD_NUMBER = 2; private com.google.cloud.dataform.v1beta1.WorkflowConfig workflowConfig_; /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workflowConfig field is set. */ @java.lang.Override public boolean hasWorkflowConfig() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workflowConfig. */ @java.lang.Override public com.google.cloud.dataform.v1beta1.WorkflowConfig getWorkflowConfig() { return workflowConfig_ == null ? com.google.cloud.dataform.v1beta1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataform.v1beta1.WorkflowConfigOrBuilder getWorkflowConfigOrBuilder() { return workflowConfig_ == null ? 
com.google.cloud.dataform.v1beta1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getWorkflowConfig()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWorkflowConfig()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest)) { return super.equals(obj); } com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest other = (com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasWorkflowConfig() != other.hasWorkflowConfig()) return false; if (hasWorkflowConfig()) { if (!getWorkflowConfig().equals(other.getWorkflowConfig())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasWorkflowConfig()) { hash = (37 * hash) + WORKFLOW_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getWorkflowConfig().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE 
? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * `UpdateWorkflowConfig` request message. * </pre> * * Protobuf type {@code google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_UpdateWorkflowConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_UpdateWorkflowConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.class, com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.Builder.class); } // Construct using com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getWorkflowConfigFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = 
null; } workflowConfig_ = null; if (workflowConfigBuilder_ != null) { workflowConfigBuilder_.dispose(); workflowConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataform.v1beta1.DataformProto .internal_static_google_cloud_dataform_v1beta1_UpdateWorkflowConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest getDefaultInstanceForType() { return com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest build() { com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest buildPartial() { com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest result = new com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.workflowConfig_ = workflowConfigBuilder_ == null ? 
workflowConfig_ : workflowConfigBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) { return mergeFrom((com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest other) { if (other == com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasWorkflowConfig()) { mergeWorkflowConfig(other.getWorkflowConfig()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getWorkflowConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. 
Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Specifies the fields to be updated in the workflow config. If * left unset, all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.dataform.v1beta1.WorkflowConfig workflowConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.WorkflowConfig, com.google.cloud.dataform.v1beta1.WorkflowConfig.Builder, com.google.cloud.dataform.v1beta1.WorkflowConfigOrBuilder> workflowConfigBuilder_; /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workflowConfig field is set. */ public boolean hasWorkflowConfig() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workflowConfig. */ public com.google.cloud.dataform.v1beta1.WorkflowConfig getWorkflowConfig() { if (workflowConfigBuilder_ == null) { return workflowConfig_ == null ? com.google.cloud.dataform.v1beta1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } else { return workflowConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The workflow config to update. 
* </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkflowConfig(com.google.cloud.dataform.v1beta1.WorkflowConfig value) { if (workflowConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } workflowConfig_ = value; } else { workflowConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkflowConfig( com.google.cloud.dataform.v1beta1.WorkflowConfig.Builder builderForValue) { if (workflowConfigBuilder_ == null) { workflowConfig_ = builderForValue.build(); } else { workflowConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeWorkflowConfig(com.google.cloud.dataform.v1beta1.WorkflowConfig value) { if (workflowConfigBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && workflowConfig_ != null && workflowConfig_ != com.google.cloud.dataform.v1beta1.WorkflowConfig.getDefaultInstance()) { getWorkflowConfigBuilder().mergeFrom(value); } else { workflowConfig_ = value; } } else { workflowConfigBuilder_.mergeFrom(value); } if (workflowConfig_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The workflow config to update. 
* </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearWorkflowConfig() { bitField0_ = (bitField0_ & ~0x00000002); workflowConfig_ = null; if (workflowConfigBuilder_ != null) { workflowConfigBuilder_.dispose(); workflowConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1beta1.WorkflowConfig.Builder getWorkflowConfigBuilder() { bitField0_ |= 0x00000002; onChanged(); return getWorkflowConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The workflow config to update. * </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1beta1.WorkflowConfigOrBuilder getWorkflowConfigOrBuilder() { if (workflowConfigBuilder_ != null) { return workflowConfigBuilder_.getMessageOrBuilder(); } else { return workflowConfig_ == null ? com.google.cloud.dataform.v1beta1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } } /** * * * <pre> * Required. The workflow config to update. 
* </pre> * * <code> * .google.cloud.dataform.v1beta1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.WorkflowConfig, com.google.cloud.dataform.v1beta1.WorkflowConfig.Builder, com.google.cloud.dataform.v1beta1.WorkflowConfigOrBuilder> getWorkflowConfigFieldBuilder() { if (workflowConfigBuilder_ == null) { workflowConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1beta1.WorkflowConfig, com.google.cloud.dataform.v1beta1.WorkflowConfig.Builder, com.google.cloud.dataform.v1beta1.WorkflowConfigOrBuilder>( getWorkflowConfig(), getParentForChildren(), isClean()); workflowConfig_ = null; } return workflowConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest) private static final com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest(); } public static com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateWorkflowConfigRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateWorkflowConfigRequest>() { @java.lang.Override public UpdateWorkflowConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateWorkflowConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateWorkflowConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataform.v1beta1.UpdateWorkflowConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,200
java-discoveryengine/grpc-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/EvaluationServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.discoveryengine.v1alpha; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/discoveryengine/v1alpha/evaluation_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class EvaluationServiceGrpc { private EvaluationServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.discoveryengine.v1alpha.EvaluationService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest, com.google.cloud.discoveryengine.v1alpha.Evaluation> getGetEvaluationMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetEvaluation", requestType = com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest.class, responseType = com.google.cloud.discoveryengine.v1alpha.Evaluation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest, com.google.cloud.discoveryengine.v1alpha.Evaluation> getGetEvaluationMethod() { io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest, com.google.cloud.discoveryengine.v1alpha.Evaluation> getGetEvaluationMethod; if ((getGetEvaluationMethod = EvaluationServiceGrpc.getGetEvaluationMethod) == null) { synchronized (EvaluationServiceGrpc.class) { if ((getGetEvaluationMethod = EvaluationServiceGrpc.getGetEvaluationMethod) == null) { EvaluationServiceGrpc.getGetEvaluationMethod = getGetEvaluationMethod = io.grpc.MethodDescriptor .<com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest, com.google.cloud.discoveryengine.v1alpha.Evaluation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetEvaluation")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.Evaluation .getDefaultInstance())) .setSchemaDescriptor( new EvaluationServiceMethodDescriptorSupplier("GetEvaluation")) .build(); } } } return getGetEvaluationMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest, 
com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> getListEvaluationsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListEvaluations", requestType = com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest.class, responseType = com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> getListEvaluationsMethod() { io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> getListEvaluationsMethod; if ((getListEvaluationsMethod = EvaluationServiceGrpc.getListEvaluationsMethod) == null) { synchronized (EvaluationServiceGrpc.class) { if ((getListEvaluationsMethod = EvaluationServiceGrpc.getListEvaluationsMethod) == null) { EvaluationServiceGrpc.getListEvaluationsMethod = getListEvaluationsMethod = io.grpc.MethodDescriptor .<com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListEvaluations")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse .getDefaultInstance())) .setSchemaDescriptor( new EvaluationServiceMethodDescriptorSupplier("ListEvaluations")) .build(); } } } return getListEvaluationsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest, 
com.google.longrunning.Operation> getCreateEvaluationMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateEvaluation", requestType = com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest, com.google.longrunning.Operation> getCreateEvaluationMethod() { io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest, com.google.longrunning.Operation> getCreateEvaluationMethod; if ((getCreateEvaluationMethod = EvaluationServiceGrpc.getCreateEvaluationMethod) == null) { synchronized (EvaluationServiceGrpc.class) { if ((getCreateEvaluationMethod = EvaluationServiceGrpc.getCreateEvaluationMethod) == null) { EvaluationServiceGrpc.getCreateEvaluationMethod = getCreateEvaluationMethod = io.grpc.MethodDescriptor .<com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateEvaluation")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor( new EvaluationServiceMethodDescriptorSupplier("CreateEvaluation")) .build(); } } } return getCreateEvaluationMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> getListEvaluationResultsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + 
"ListEvaluationResults", requestType = com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest.class, responseType = com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> getListEvaluationResultsMethod() { io.grpc.MethodDescriptor< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> getListEvaluationResultsMethod; if ((getListEvaluationResultsMethod = EvaluationServiceGrpc.getListEvaluationResultsMethod) == null) { synchronized (EvaluationServiceGrpc.class) { if ((getListEvaluationResultsMethod = EvaluationServiceGrpc.getListEvaluationResultsMethod) == null) { EvaluationServiceGrpc.getListEvaluationResultsMethod = getListEvaluationResultsMethod = io.grpc.MethodDescriptor .<com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "ListEvaluationResults")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse .getDefaultInstance())) .setSchemaDescriptor( new EvaluationServiceMethodDescriptorSupplier("ListEvaluationResults")) .build(); } } } return getListEvaluationResultsMethod; } /** Creates a new async stub that supports all call types for the service */ public static EvaluationServiceStub newStub(io.grpc.Channel channel) { 
io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceStub>() { @java.lang.Override public EvaluationServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceStub(channel, callOptions); } }; return EvaluationServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static EvaluationServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceBlockingV2Stub>() { @java.lang.Override public EvaluationServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceBlockingV2Stub(channel, callOptions); } }; return EvaluationServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static EvaluationServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceBlockingStub>() { @java.lang.Override public EvaluationServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceBlockingStub(channel, callOptions); } }; return EvaluationServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static EvaluationServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EvaluationServiceFutureStub>() { @java.lang.Override public EvaluationServiceFutureStub newStub( io.grpc.Channel 
channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceFutureStub(channel, callOptions); } }; return EvaluationServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public interface AsyncService { /** * * * <pre> * Gets a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ default void getEvaluation( com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest request, io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.Evaluation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetEvaluationMethod(), responseObserver); } /** * * * <pre> * Gets a list of * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s. * </pre> */ default void listEvaluations( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListEvaluationsMethod(), responseObserver); } /** * * * <pre> * Creates a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * Upon creation, the evaluation will be automatically triggered and begin * execution. * </pre> */ default void createEvaluation( com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateEvaluationMethod(), responseObserver); } /** * * * <pre> * Gets a list of results for a given a * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. 
* </pre> */ default void listEvaluationResults( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListEvaluationResultsMethod(), responseObserver); } } /** * Base class for the server implementation of the service EvaluationService. * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public abstract static class EvaluationServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return EvaluationServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service EvaluationService. * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public static final class EvaluationServiceStub extends io.grpc.stub.AbstractAsyncStub<EvaluationServiceStub> { private EvaluationServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EvaluationServiceStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceStub(channel, callOptions); } /** * * * <pre> * Gets a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public void getEvaluation( com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest request, io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.Evaluation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetEvaluationMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets a list of * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s. 
* </pre> */ public void listEvaluations( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListEvaluationsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * Upon creation, the evaluation will be automatically triggered and begin * execution. * </pre> */ public void createEvaluation( com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateEvaluationMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets a list of results for a given a * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public void listEvaluationResults( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListEvaluationResultsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service EvaluationService. 
* * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public static final class EvaluationServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<EvaluationServiceBlockingV2Stub> { private EvaluationServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EvaluationServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Gets a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public com.google.cloud.discoveryengine.v1alpha.Evaluation getEvaluation( com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetEvaluationMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a list of * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s. * </pre> */ public com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse listEvaluations( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListEvaluationsMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * Upon creation, the evaluation will be automatically triggered and begin * execution. * </pre> */ public com.google.longrunning.Operation createEvaluation( com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateEvaluationMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a list of results for a given a * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. 
* </pre> */ public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse listEvaluationResults( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListEvaluationResultsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service EvaluationService. * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public static final class EvaluationServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<EvaluationServiceBlockingStub> { private EvaluationServiceBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EvaluationServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Gets a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public com.google.cloud.discoveryengine.v1alpha.Evaluation getEvaluation( com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetEvaluationMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a list of * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s. * </pre> */ public com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse listEvaluations( com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListEvaluationsMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * Upon creation, the evaluation will be automatically triggered and begin * execution. 
* </pre> */ public com.google.longrunning.Operation createEvaluation( com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateEvaluationMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a list of results for a given a * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse listEvaluationResults( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListEvaluationResultsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service EvaluationService. * * <pre> * Service for managing * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s, * </pre> */ public static final class EvaluationServiceFutureStub extends io.grpc.stub.AbstractFutureStub<EvaluationServiceFutureStub> { private EvaluationServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EvaluationServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EvaluationServiceFutureStub(channel, callOptions); } /** * * * <pre> * Gets a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.discoveryengine.v1alpha.Evaluation> getEvaluation(com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetEvaluationMethod(), getCallOptions()), request); } /** * * * <pre> * Gets a list of * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]s. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse> listEvaluations(com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListEvaluationsMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * Upon creation, the evaluation will be automatically triggered and begin * execution. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> createEvaluation(com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateEvaluationMethod(), getCallOptions()), request); } /** * * * <pre> * Gets a list of results for a given a * [Evaluation][google.cloud.discoveryengine.v1alpha.Evaluation]. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse> listEvaluationResults( com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListEvaluationResultsMethod(), getCallOptions()), request); } } private static final int METHODID_GET_EVALUATION = 0; private static final int METHODID_LIST_EVALUATIONS = 1; private static final int METHODID_CREATE_EVALUATION = 2; private static final int METHODID_LIST_EVALUATION_RESULTS = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int 
methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_EVALUATION: serviceImpl.getEvaluation( (com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.Evaluation>) responseObserver); break; case METHODID_LIST_EVALUATIONS: serviceImpl.listEvaluations( (com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse>) responseObserver); break; case METHODID_CREATE_EVALUATION: serviceImpl.createEvaluation( (com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_LIST_EVALUATION_RESULTS: serviceImpl.listEvaluationResults( (com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetEvaluationMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.discoveryengine.v1alpha.GetEvaluationRequest, com.google.cloud.discoveryengine.v1alpha.Evaluation>( service, METHODID_GET_EVALUATION))) .addMethod( getListEvaluationsMethod(), 
io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.discoveryengine.v1alpha.ListEvaluationsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationsResponse>( service, METHODID_LIST_EVALUATIONS))) .addMethod( getCreateEvaluationMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.discoveryengine.v1alpha.CreateEvaluationRequest, com.google.longrunning.Operation>(service, METHODID_CREATE_EVALUATION))) .addMethod( getListEvaluationResultsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsRequest, com.google.cloud.discoveryengine.v1alpha.ListEvaluationResultsResponse>( service, METHODID_LIST_EVALUATION_RESULTS))) .build(); } private abstract static class EvaluationServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { EvaluationServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.discoveryengine.v1alpha.EvaluationServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("EvaluationService"); } } private static final class EvaluationServiceFileDescriptorSupplier extends EvaluationServiceBaseDescriptorSupplier { EvaluationServiceFileDescriptorSupplier() {} } private static final class EvaluationServiceMethodDescriptorSupplier extends EvaluationServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; EvaluationServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return 
getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (EvaluationServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new EvaluationServiceFileDescriptorSupplier()) .addMethod(getGetEvaluationMethod()) .addMethod(getListEvaluationsMethod()) .addMethod(getCreateEvaluationMethod()) .addMethod(getListEvaluationResultsMethod()) .build(); } } } return result; } }
googleapis/google-cloud-java
35,228
java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1alpha/stub/HttpJsonSchemaServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.discoveryengine.v1alpha.stub; import static com.google.cloud.discoveryengine.v1alpha.SchemaServiceClient.ListSchemasPagedResponse; import com.google.api.HttpRule; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.discoveryengine.v1alpha.CreateSchemaMetadata; import com.google.cloud.discoveryengine.v1alpha.CreateSchemaRequest; import com.google.cloud.discoveryengine.v1alpha.DeleteSchemaMetadata; import com.google.cloud.discoveryengine.v1alpha.DeleteSchemaRequest; import com.google.cloud.discoveryengine.v1alpha.GetSchemaRequest; import 
com.google.cloud.discoveryengine.v1alpha.ListSchemasRequest;
import com.google.cloud.discoveryengine.v1alpha.ListSchemasResponse;
import com.google.cloud.discoveryengine.v1alpha.Schema;
import com.google.cloud.discoveryengine.v1alpha.UpdateSchemaMetadata;
import com.google.cloud.discoveryengine.v1alpha.UpdateSchemaRequest;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the SchemaService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@BetaApi
@Generated("by gapic-generator-java")
public class HttpJsonSchemaServiceStub extends SchemaServiceStub {
  // Registry of every proto message type that can appear inside a long-running
  // Operation's response/metadata Any fields, so the JSON response parser can
  // resolve them. Must list each metadata/response type used by the LRO methods
  // below (Create/Update/DeleteSchema).
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(Schema.getDescriptor())
          .add(Empty.getDescriptor())
          .add(UpdateSchemaMetadata.getDescriptor())
          .add(DeleteSchemaMetadata.getDescriptor())
          .add(CreateSchemaMetadata.getDescriptor())
          .build();

  // ---------------------------------------------------------------------------
  // Per-RPC method descriptors: each one maps a gRPC method name to its REST
  // (HTTP/JSON) binding — HTTP verb, URL template(s), query params, request
  // body, and response parser.
  // ---------------------------------------------------------------------------

  // GetSchema: unary GET; "name" is bound into the URL path. Two path variants
  // cover data stores both with and without an enclosing collection.
  private static final ApiMethodDescriptor<GetSchemaRequest, Schema> getSchemaMethodDescriptor =
      ApiMethodDescriptor.<GetSchemaRequest, Schema>newBuilder()
          .setFullMethodName("google.cloud.discoveryengine.v1alpha.SchemaService/GetSchema")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetSchemaRequest>newBuilder()
                  .setPath(
                      "/v1alpha/{name=projects/*/locations/*/dataStores/*/schemas/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetSchemaRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setAdditionalPaths(
                      "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetSchemaRequest> serializer =
                            ProtoRestSerializer.create();
                        // Force JSON transport with enums encoded as ints.
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  // GET request: no HTTP body.
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Schema>newBuilder()
                  .setDefaultInstance(Schema.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // ListSchemas: unary GET on the parent collection; pagination fields
  // (pageSize/pageToken) travel as query parameters.
  private static final ApiMethodDescriptor<ListSchemasRequest, ListSchemasResponse>
      listSchemasMethodDescriptor =
          ApiMethodDescriptor.<ListSchemasRequest, ListSchemasResponse>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1alpha.SchemaService/ListSchemas")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListSchemasRequest>newBuilder()
                      .setPath(
                          "/v1alpha/{parent=projects/*/locations/*/dataStores/*}/schemas",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListSchemasRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListSchemasRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListSchemasResponse>newBuilder()
                      .setDefaultInstance(ListSchemasResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // CreateSchema: POST with the Schema message as the request body and the
  // caller-chosen schemaId as a query param. Returns a long-running Operation;
  // the snapshot factory adapts the raw Operation for the LRO machinery.
  private static final ApiMethodDescriptor<CreateSchemaRequest, Operation>
      createSchemaMethodDescriptor =
          ApiMethodDescriptor.<CreateSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1alpha.SchemaService/CreateSchema")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateSchemaRequest>newBuilder()
                      .setPath(
                          "/v1alpha/{parent=projects/*/locations/*/dataStores/*}/schemas",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "schemaId", request.getSchemaId());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("schema", request.getSchema(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (CreateSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // UpdateSchema: PATCH; the resource name comes from the nested message
  // ("schema.name"), and allowMissing is sent as a query param. LRO method.
  private static final ApiMethodDescriptor<UpdateSchemaRequest, Operation>
      updateSchemaMethodDescriptor =
          ApiMethodDescriptor.<UpdateSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1alpha.SchemaService/UpdateSchema")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateSchemaRequest>newBuilder()
                      .setPath(
                          "/v1alpha/{schema.name=projects/*/locations/*/dataStores/*/schemas/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "schema.name", request.getSchema().getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1alpha/{schema.name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "allowMissing", request.getAllowMissing());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("schema", request.getSchema(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (UpdateSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // DeleteSchema: DELETE by resource name; no body, no extra query params.
  // LRO method whose eventual response is Empty.
  private static final ApiMethodDescriptor<DeleteSchemaRequest, Operation>
      deleteSchemaMethodDescriptor =
          ApiMethodDescriptor.<DeleteSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1alpha.SchemaService/DeleteSchema")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteSchemaRequest>newBuilder()
                      .setPath(
                          "/v1alpha/{name=projects/*/locations/*/dataStores/*/schemas/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // Callables wired up in the constructor; one plain callable per RPC, plus a
  // paged variant for ListSchemas and OperationCallables for the LRO methods.
  private final UnaryCallable<GetSchemaRequest, Schema> getSchemaCallable;
  private final UnaryCallable<ListSchemasRequest, ListSchemasResponse> listSchemasCallable;
  private final UnaryCallable<ListSchemasRequest, ListSchemasPagedResponse>
      listSchemasPagedCallable;
  private final UnaryCallable<CreateSchemaRequest, Operation> createSchemaCallable;
  private final OperationCallable<CreateSchemaRequest, Schema, CreateSchemaMetadata>
      createSchemaOperationCallable;
  private final UnaryCallable<UpdateSchemaRequest, Operation> updateSchemaCallable;
  private final OperationCallable<UpdateSchemaRequest, Schema, UpdateSchemaMetadata>
      updateSchemaOperationCallable;
  private final UnaryCallable<DeleteSchemaRequest, Operation> deleteSchemaCallable;
  private final OperationCallable<DeleteSchemaRequest, Empty, DeleteSchemaMetadata>
      deleteSchemaOperationCallable;

  private final BackgroundResource backgroundResources;
  // Companion stub used to poll/cancel the long-running operations started by
  // the Create/Update/DeleteSchema methods.
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;

  /** Creates a stub from the given settings, building its own ClientContext. */
  public static final HttpJsonSchemaServiceStub create(SchemaServiceStubSettings settings)
      throws IOException {
    return new HttpJsonSchemaServiceStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default HTTP/JSON settings bound to the given context. */
  public static final HttpJsonSchemaServiceStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonSchemaServiceStub(
        SchemaServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, the given context, and a custom callable factory. */
  public static final HttpJsonSchemaServiceStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonSchemaServiceStub(
        SchemaServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonSchemaServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSchemaServiceStub(
      SchemaServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new HttpJsonSchemaServiceCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonSchemaServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSchemaServiceStub(
      SchemaServiceStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // The generated Operations service has no REST bindings of its own, so the
    // full set of HTTP rules (every operation path this API exposes) is
    // supplied here explicitly.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.CancelOperation",
                    HttpRule.newBuilder()
                        .setPost(
                            "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet(
                            "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/evaluations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1alpha/{name=projects/*/locations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1alpha/{name=projects/*/operations/*}")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.ListOperations",
                    HttpRule.newBuilder()
                        .setGet(
                            "/v1alpha/{name=projects/*/locations/*/collections/*/dataConnector}/operations")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*/engines/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/collections/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1alpha/{name=projects/*/locations/*/identity_mapping_stores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1alpha/{name=projects/*/locations/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1alpha/{name=projects/*}/operations")
                                .build())
                        .build())
                .build());

    // Transport settings: pair each method descriptor with a params extractor
    // that populates the routing header (e.g. "name", "parent") for the call.
    HttpJsonCallSettings<GetSchemaRequest, Schema> getSchemaTransportSettings =
        HttpJsonCallSettings.<GetSchemaRequest, Schema>newBuilder()
            .setMethodDescriptor(getSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListSchemasRequest, ListSchemasResponse> listSchemasTransportSettings =
        HttpJsonCallSettings.<ListSchemasRequest, ListSchemasResponse>newBuilder()
            .setMethodDescriptor(listSchemasMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<CreateSchemaRequest, Operation> createSchemaTransportSettings =
        HttpJsonCallSettings.<CreateSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(createSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateSchemaRequest, Operation> updateSchemaTransportSettings =
        HttpJsonCallSettings.<UpdateSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(updateSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("schema.name", String.valueOf(request.getSchema().getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeleteSchemaRequest, Operation> deleteSchemaTransportSettings =
        HttpJsonCallSettings.<DeleteSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    // Build the final callables from transport settings + retry/timeout
    // settings. LRO methods additionally get an OperationCallable that polls
    // via httpJsonOperationsStub.
    this.getSchemaCallable =
        callableFactory.createUnaryCallable(
            getSchemaTransportSettings, settings.getSchemaSettings(), clientContext);
    this.listSchemasCallable =
        callableFactory.createUnaryCallable(
            listSchemasTransportSettings, settings.listSchemasSettings(), clientContext);
    this.listSchemasPagedCallable =
        callableFactory.createPagedCallable(
            listSchemasTransportSettings, settings.listSchemasSettings(), clientContext);
    this.createSchemaCallable =
        callableFactory.createUnaryCallable(
            createSchemaTransportSettings, settings.createSchemaSettings(), clientContext);
    this.createSchemaOperationCallable =
        callableFactory.createOperationCallable(
            createSchemaTransportSettings,
            settings.createSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.updateSchemaCallable =
        callableFactory.createUnaryCallable(
            updateSchemaTransportSettings, settings.updateSchemaSettings(), clientContext);
    this.updateSchemaOperationCallable =
        callableFactory.createOperationCallable(
            updateSchemaTransportSettings,
            settings.updateSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.deleteSchemaCallable =
        callableFactory.createUnaryCallable(
            deleteSchemaTransportSettings, settings.deleteSchemaSettings(), clientContext);
    this.deleteSchemaOperationCallable =
        callableFactory.createOperationCallable(
            deleteSchemaTransportSettings,
            settings.deleteSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the REST method descriptors for all RPCs of this service. */
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(getSchemaMethodDescriptor);
    methodDescriptors.add(listSchemasMethodDescriptor);
    methodDescriptors.add(createSchemaMethodDescriptor);
    methodDescriptors.add(updateSchemaMethodDescriptor);
    methodDescriptors.add(deleteSchemaMethodDescriptor);
    return methodDescriptors;
  }

  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }

  @Override
  public UnaryCallable<GetSchemaRequest, Schema> getSchemaCallable() {
    return getSchemaCallable;
  }

  @Override
  public UnaryCallable<ListSchemasRequest, ListSchemasResponse> listSchemasCallable() {
    return listSchemasCallable;
  }

  @Override
  public UnaryCallable<ListSchemasRequest, ListSchemasPagedResponse> listSchemasPagedCallable() {
    return listSchemasPagedCallable;
  }

  @Override
  public UnaryCallable<CreateSchemaRequest, Operation> createSchemaCallable() {
    return createSchemaCallable;
  }

  @Override
  public OperationCallable<CreateSchemaRequest, Schema, CreateSchemaMetadata>
      createSchemaOperationCallable() {
    return createSchemaOperationCallable;
  }

  @Override
  public UnaryCallable<UpdateSchemaRequest, Operation> updateSchemaCallable() {
    return updateSchemaCallable;
  }

  @Override
  public OperationCallable<UpdateSchemaRequest, Schema, UpdateSchemaMetadata>
      updateSchemaOperationCallable() {
    return updateSchemaOperationCallable;
  }

  @Override
  public UnaryCallable<DeleteSchemaRequest, Operation> deleteSchemaCallable() {
    return deleteSchemaCallable;
  }

  @Override
  public OperationCallable<DeleteSchemaRequest, Empty, DeleteSchemaMetadata>
      deleteSchemaOperationCallable() {
    return deleteSchemaOperationCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked exceptions as-is; wrap checked ones below.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
35,033
java-bigqueryreservation/proto-google-cloud-bigqueryreservation-v1/src/main/java/com/google/cloud/bigquery/reservation/v1/UpdateCapacityCommitmentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/reservation/v1/reservation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.reservation.v1; /** * * * <pre> * The request for * [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. * </pre> * * Protobuf type {@code google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest} */ public final class UpdateCapacityCommitmentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) UpdateCapacityCommitmentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCapacityCommitmentRequest.newBuilder() to construct. 
private UpdateCapacityCommitmentRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCapacityCommitmentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCapacityCommitmentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.reservation.v1.ReservationProto .internal_static_google_cloud_bigquery_reservation_v1_UpdateCapacityCommitmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.reservation.v1.ReservationProto .internal_static_google_cloud_bigquery_reservation_v1_UpdateCapacityCommitmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest.class, com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest.Builder.class); } private int bitField0_; public static final int CAPACITY_COMMITMENT_FIELD_NUMBER = 1; private com.google.cloud.bigquery.reservation.v1.CapacityCommitment capacityCommitment_; /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1;</code> * * @return Whether the capacityCommitment field is set. */ @java.lang.Override public boolean hasCapacityCommitment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1;</code> * * @return The capacityCommitment. */ @java.lang.Override public com.google.cloud.bigquery.reservation.v1.CapacityCommitment getCapacityCommitment() { return capacityCommitment_ == null ? 
com.google.cloud.bigquery.reservation.v1.CapacityCommitment.getDefaultInstance() : capacityCommitment_; } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1;</code> */ @java.lang.Override public com.google.cloud.bigquery.reservation.v1.CapacityCommitmentOrBuilder getCapacityCommitmentOrBuilder() { return capacityCommitment_ == null ? com.google.cloud.bigquery.reservation.v1.CapacityCommitment.getDefaultInstance() : capacityCommitment_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCapacityCommitment()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCapacityCommitment()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest)) { return super.equals(obj); } com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest other = (com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) obj; if (hasCapacityCommitment() != other.hasCapacityCommitment()) return false; if (hasCapacityCommitment()) { if (!getCapacityCommitment().equals(other.getCapacityCommitment())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCapacityCommitment()) { hash = (37 * hash) + CAPACITY_COMMITMENT_FIELD_NUMBER; hash = (53 * hash) + getCapacityCommitment().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request for * [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. * </pre> * * Protobuf type {@code google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.reservation.v1.ReservationProto .internal_static_google_cloud_bigquery_reservation_v1_UpdateCapacityCommitmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.reservation.v1.ReservationProto .internal_static_google_cloud_bigquery_reservation_v1_UpdateCapacityCommitmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest.class, com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest.Builder .class); } // Construct using // com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCapacityCommitmentFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; capacityCommitment_ = null; if (capacityCommitmentBuilder_ != null) { capacityCommitmentBuilder_.dispose(); capacityCommitmentBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.reservation.v1.ReservationProto .internal_static_google_cloud_bigquery_reservation_v1_UpdateCapacityCommitmentRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest build() { com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest buildPartial() { com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest result = new com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if 
(((from_bitField0_ & 0x00000001) != 0)) { result.capacityCommitment_ = capacityCommitmentBuilder_ == null ? capacityCommitment_ : capacityCommitmentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) { return mergeFrom( (com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest other) { if (other == com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest .getDefaultInstance()) return this; if (other.hasCapacityCommitment()) { mergeCapacityCommitment(other.getCapacityCommitment()); } if (other.hasUpdateMask()) { 
mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getCapacityCommitmentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.bigquery.reservation.v1.CapacityCommitment capacityCommitment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.reservation.v1.CapacityCommitment, com.google.cloud.bigquery.reservation.v1.CapacityCommitment.Builder, com.google.cloud.bigquery.reservation.v1.CapacityCommitmentOrBuilder> capacityCommitmentBuilder_; /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> * * @return Whether the capacityCommitment field is set. */ public boolean hasCapacityCommitment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Content of the capacity commitment to update. 
* </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> * * @return The capacityCommitment. */ public com.google.cloud.bigquery.reservation.v1.CapacityCommitment getCapacityCommitment() { if (capacityCommitmentBuilder_ == null) { return capacityCommitment_ == null ? com.google.cloud.bigquery.reservation.v1.CapacityCommitment.getDefaultInstance() : capacityCommitment_; } else { return capacityCommitmentBuilder_.getMessage(); } } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public Builder setCapacityCommitment( com.google.cloud.bigquery.reservation.v1.CapacityCommitment value) { if (capacityCommitmentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } capacityCommitment_ = value; } else { capacityCommitmentBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public Builder setCapacityCommitment( com.google.cloud.bigquery.reservation.v1.CapacityCommitment.Builder builderForValue) { if (capacityCommitmentBuilder_ == null) { capacityCommitment_ = builderForValue.build(); } else { capacityCommitmentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Content of the capacity commitment to update. 
* </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public Builder mergeCapacityCommitment( com.google.cloud.bigquery.reservation.v1.CapacityCommitment value) { if (capacityCommitmentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && capacityCommitment_ != null && capacityCommitment_ != com.google.cloud.bigquery.reservation.v1.CapacityCommitment .getDefaultInstance()) { getCapacityCommitmentBuilder().mergeFrom(value); } else { capacityCommitment_ = value; } } else { capacityCommitmentBuilder_.mergeFrom(value); } if (capacityCommitment_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public Builder clearCapacityCommitment() { bitField0_ = (bitField0_ & ~0x00000001); capacityCommitment_ = null; if (capacityCommitmentBuilder_ != null) { capacityCommitmentBuilder_.dispose(); capacityCommitmentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public com.google.cloud.bigquery.reservation.v1.CapacityCommitment.Builder getCapacityCommitmentBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCapacityCommitmentFieldBuilder().getBuilder(); } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ public com.google.cloud.bigquery.reservation.v1.CapacityCommitmentOrBuilder getCapacityCommitmentOrBuilder() { if (capacityCommitmentBuilder_ != null) { return capacityCommitmentBuilder_.getMessageOrBuilder(); } else { return capacityCommitment_ == null ? 
com.google.cloud.bigquery.reservation.v1.CapacityCommitment.getDefaultInstance() : capacityCommitment_; } } /** * * * <pre> * Content of the capacity commitment to update. * </pre> * * <code>.google.cloud.bigquery.reservation.v1.CapacityCommitment capacity_commitment = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.reservation.v1.CapacityCommitment, com.google.cloud.bigquery.reservation.v1.CapacityCommitment.Builder, com.google.cloud.bigquery.reservation.v1.CapacityCommitmentOrBuilder> getCapacityCommitmentFieldBuilder() { if (capacityCommitmentBuilder_ == null) { capacityCommitmentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.reservation.v1.CapacityCommitment, com.google.cloud.bigquery.reservation.v1.CapacityCommitment.Builder, com.google.cloud.bigquery.reservation.v1.CapacityCommitmentOrBuilder>( getCapacityCommitment(), getParentForChildren(), isClean()); capacityCommitment_ = null; } return capacityCommitmentBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Standard field mask for the set of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Standard field mask for the set of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest) private static final com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest(); } public static com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCapacityCommitmentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCapacityCommitmentRequest>() { @java.lang.Override public UpdateCapacityCommitmentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCapacityCommitmentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCapacityCommitmentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.reservation.v1.UpdateCapacityCommitmentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/j2objc
35,206
jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/nio/channels/AsynchronousSocketChannelTest.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package libcore.java.nio.channels; import java.net.BindException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketOption; import java.net.StandardSocketOptions; import java.nio.ByteBuffer; import java.nio.channels.AsynchronousChannelGroup; import java.nio.channels.AsynchronousSocketChannel; import java.nio.channels.ClosedChannelException; import java.nio.channels.NotYetConnectedException; import java.nio.channels.UnresolvedAddressException; import java.nio.channels.UnsupportedAddressTypeException; import java.nio.channels.spi.AsynchronousChannelProvider; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import junit.framework.TestCase; /* J2ObjC removed: unsupported import libcore.junit.junit3.TestCaseWithRules; import libcore.junit.util.ResourceLeakageDetector; import libcore.junit.util.ResourceLeakageDetector.LeakageDetectorRule; public class AsynchronousSocketChannelTest extends TestCaseWithRules { */ public class AsynchronousSocketChannelTest extends TestCase { /* J2ObjC removed: unsupported ResourceLeakageDetector @Rule public LeakageDetectorRule leakageDetectorRule = ResourceLeakageDetector.getRule(); */ // Comfortably smaller than the default TCP 
socket buffer size to avoid blocking on write. final int NON_BLOCKING_MESSAGE_SIZE = 32; public void test_connect() throws Exception { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); assertEquals(asc.provider(), AsynchronousChannelProvider.provider()); assertTrue(asc.isOpen()); assertNull(asc.getRemoteAddress()); assertNull(asc.getLocalAddress()); // Connect InetSocketAddress remoteAddress = new InetSocketAddress("localhost", ss.getLocalPort()); Future<Void> connectFuture = asc.connect(remoteAddress); connectFuture.get(1000, TimeUnit.MILLISECONDS); Socket s = ss.accept(); assertNotNull(asc.getLocalAddress()); assertEquals(asc.getLocalAddress(), s.getRemoteSocketAddress()); assertNotNull(asc.getRemoteAddress()); assertEquals(asc.getRemoteAddress(), s.getLocalSocketAddress()); assertTrue(asc.isOpen()); asc.close(); ss.close(); s.close(); } public void test_bind() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); assertNull(asc.getLocalAddress()); assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.bind(new InetSocketAddress(0)); assertNotNull(asc.getLocalAddress()); assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); } static class MySocketAddress extends SocketAddress { final static long serialVersionUID = 0; } public void test_bind_unsupportedAddress() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); try { asc.bind(new MySocketAddress()); fail(); } catch (UnsupportedAddressTypeException expected) {} assertNull(asc.getLocalAddress()); assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); } public void test_bind_unresolvedAddress() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); try { asc.bind(new InetSocketAddress("unresolvedname", 31415)); fail(); } catch (UnresolvedAddressException expected) {} assertNull(asc.getLocalAddress()); 
assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); } public void test_bind_usedAddress() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); ServerSocket ss = new ServerSocket(0); try { asc.bind(ss.getLocalSocketAddress()); fail(); } catch (BindException expected) {} assertNull(asc.getLocalAddress()); assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); ss.close(); asc.close(); } public void test_bind_null() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); asc.bind(null); assertNotNull(asc.getLocalAddress()); assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); } public void test_connect_unresolvedAddress() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); try { asc.connect(new InetSocketAddress("unresolvedname", 31415)); fail(); } catch (UnresolvedAddressException expected) {} assertNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); } public void test_close() throws Throwable { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); assertTrue(asc.isOpen()); asc.close(); assertFalse(asc.isOpen()); try { asc.getRemoteAddress(); fail(); } catch (ClosedChannelException expected) {} try { asc.getLocalAddress(); fail(); } catch (ClosedChannelException expected) {} ByteBuffer tmp = createTestByteBuffer(16, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = null; FutureLikeCompletionHandler<Long> longCompletionHandler = null; Future<Integer> readFuture = asc.read(tmp); try { readFuture.get(1000, TimeUnit.MILLISECONDS); fail(); } catch (ExecutionException expected) { assertTrue(expected.getCause() instanceof ClosedChannelException); } longCompletionHandler = new FutureLikeCompletionHandler<>(); asc.read(new ByteBuffer[]{tmp}, 0, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); try { longCompletionHandler.get(1000); fail(); } catch (ClosedChannelException 
expected) {} intCompletionHandler = new FutureLikeCompletionHandler<>(); asc.read(tmp, null, intCompletionHandler); try { intCompletionHandler.get(1000); fail(); } catch (ClosedChannelException expected) {} intCompletionHandler = new FutureLikeCompletionHandler<>(); asc.read(tmp, 100, TimeUnit.MILLISECONDS, null, intCompletionHandler); try { intCompletionHandler.get(100); fail(); } catch (ClosedChannelException expected) {} Future<Integer> writeFuture = asc.write(tmp); try { writeFuture.get(1000, TimeUnit.MILLISECONDS); fail(); } catch (ExecutionException expected) { assertTrue(expected.getCause() instanceof ClosedChannelException); } longCompletionHandler = new FutureLikeCompletionHandler<>(); asc.write(new ByteBuffer[]{tmp}, 0, 1, 100, TimeUnit.MILLISECONDS, null, longCompletionHandler); try { longCompletionHandler.get(1000); fail(); } catch (ClosedChannelException expected) {} intCompletionHandler = new FutureLikeCompletionHandler<>(); asc.write(tmp, null, intCompletionHandler); try { intCompletionHandler.get(1000); fail(); } catch (ClosedChannelException expected) {} intCompletionHandler = new FutureLikeCompletionHandler<>(); asc.write(tmp, 100, TimeUnit.MILLISECONDS, null, intCompletionHandler); try { intCompletionHandler.get(1000); fail(); } catch (ClosedChannelException expected) {} try { asc.setOption(StandardSocketOptions.SO_REUSEADDR, true); fail(); } catch(ClosedChannelException expected) {} // Try second close asc.close(); } public void test_futureReadWrite_HeapButeBuffer() throws Exception { test_futureReadWrite(false /* useDirectByteBuffer */); } public void test_futureReadWrite_DirectByteBuffer() throws Exception { test_futureReadWrite(true /* useDirectByteBuffer */); } private void test_futureReadWrite(boolean useDirectByteBuffer) throws Exception { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect Future<Void> connectFuture = asc.connect(ss.getLocalSocketAddress()); 
connectFuture.get(1000, TimeUnit.MILLISECONDS); assertNotNull(asc.getRemoteAddress()); assertTrue(connectFuture.isDone()); // Accept & write data final int messageSize = NON_BLOCKING_MESSAGE_SIZE; final ByteBuffer sendData = createTestByteBuffer(messageSize, useDirectByteBuffer); Socket sss = ss.accept(); // Small message, won't block on write sss.getOutputStream().write(sendData.array(), sendData.arrayOffset(), messageSize); // Read data from async channel and call #get on result future ByteBuffer receivedData = createTestByteBuffer(messageSize, useDirectByteBuffer); assertEquals(messageSize, (int)asc.read(receivedData).get(1000, TimeUnit.MILLISECONDS)); // Compare results receivedData.flip(); assertEquals(sendData, receivedData); // Write data to async channel and call #get on result future assertEquals(messageSize, (int)asc.write(sendData).get(1000, TimeUnit.MILLISECONDS)); // Read data and compare with original byte[] readArray = new byte[messageSize]; assertEquals(messageSize, sss.getInputStream().read(readArray)); // Compare results sendData.flip(); assertEquals(sendData, ByteBuffer.wrap(readArray)); asc.close(); sss.close(); ss.close(); } public void test_completionHandlerReadWrite_HeapByteBuffer() throws Throwable { test_completionHandlerReadWrite(false /* useDirectByteBuffer */); } public void test_completionHandlerReadWrite_DirectByteBuffer() throws Throwable { test_completionHandlerReadWrite(true /* useDirectByteBuffer */); } private void test_completionHandlerReadWrite(boolean useDirectByteBuffer) throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); Object attachment = new Integer(1); asc.connect(ss.getLocalSocketAddress(), attachment, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); assertEquals(attachment, 
connectCompletionHandler.getAttachment()); // Accept & write data final int messageSize = NON_BLOCKING_MESSAGE_SIZE; ByteBuffer sendData = createTestByteBuffer(messageSize, useDirectByteBuffer); Socket sss = ss.accept(); // Small message, won't block on write sss.getOutputStream().write(sendData.array(), sendData.arrayOffset(), messageSize); // Read data from async channel ByteBuffer receivedData = createTestByteBuffer(messageSize, useDirectByteBuffer); FutureLikeCompletionHandler<Integer> readCompletionHandler = new FutureLikeCompletionHandler<>(); asc.read(receivedData, attachment, readCompletionHandler); assertEquals(messageSize, (int)readCompletionHandler.get(1000)); assertEquals(attachment, readCompletionHandler.getAttachment()); // Compare results receivedData.flip(); assertEquals(sendData, receivedData); // Write data to async channel FutureLikeCompletionHandler<Integer> writeCompletionHandler = new FutureLikeCompletionHandler<>(); asc.write(sendData, attachment, writeCompletionHandler); assertEquals(messageSize, (int)writeCompletionHandler.get(1000)); assertEquals(attachment, writeCompletionHandler.getAttachment()); // Read data and compare with original byte[] readArray = new byte[messageSize]; assertEquals(messageSize, sss.getInputStream().read(readArray)); sendData.flip(); assertEquals(sendData, ByteBuffer.wrap(readArray)); asc.close(); sss.close(); ss.close(); } public void test_scatterReadWrite_HeapByteBuffer() throws Throwable { test_scatterReadWrite(false /* useDirectByteBuffer */); } // b/282198642 // public void test_scatterReadWrite_DirectByteBuffer() throws Throwable { // test_scatterReadWrite(true /* useDirectByteBuffer */); // } private void test_scatterReadWrite(boolean useDirectByteBuffer) throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); Object attachment = new 
Integer(1); asc.connect(ss.getLocalSocketAddress(), attachment, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); assertEquals(attachment, connectCompletionHandler.getAttachment()); // Accept & write data final int messageSize = NON_BLOCKING_MESSAGE_SIZE; ByteBuffer sendData1 = createTestByteBuffer(messageSize, useDirectByteBuffer, 0); ByteBuffer sendData2 = createTestByteBuffer(messageSize, useDirectByteBuffer, 6); Socket sss = ss.accept(); // Small message, won't block on write sss.getOutputStream().write(sendData1.array(), sendData1.arrayOffset(), messageSize); sss.getOutputStream().write(sendData2.array(), sendData2.arrayOffset(), messageSize); // Read data from async channel ByteBuffer receivedData1 = createTestByteBuffer(messageSize, useDirectByteBuffer); ByteBuffer receivedData2 = createTestByteBuffer(messageSize, useDirectByteBuffer); FutureLikeCompletionHandler<Long> readCompletionHandler = new FutureLikeCompletionHandler<>(); asc.read(new ByteBuffer[]{receivedData1, receivedData2}, 0, 2, 1000L, TimeUnit.MILLISECONDS, attachment, readCompletionHandler); assertEquals(messageSize * 2L, (long)readCompletionHandler.get(1000)); assertEquals(attachment, readCompletionHandler.getAttachment()); // Compare results receivedData1.flip(); assertEquals(sendData1, receivedData1); receivedData2.flip(); assertEquals(sendData2, receivedData2); // Write data to async channel FutureLikeCompletionHandler<Long> writeCompletionHandler = new FutureLikeCompletionHandler<>(); asc.write(new ByteBuffer[]{sendData1, sendData2}, 0, 2, 1000L, TimeUnit.MILLISECONDS, attachment, writeCompletionHandler); assertEquals(messageSize*2L, (long)writeCompletionHandler.get(1000)); assertEquals(attachment, writeCompletionHandler.getAttachment()); // Read data and compare with original byte[] readArray = new byte[messageSize]; assertEquals(messageSize, sss.getInputStream().read(readArray)); sendData1.flip(); assertEquals(sendData1, 
ByteBuffer.wrap(readArray)); assertEquals(messageSize, sss.getInputStream().read(readArray)); sendData2.flip(); assertEquals(sendData2, ByteBuffer.wrap(readArray)); asc.close(); sss.close(); ss.close(); } public void test_completionHandler_connect_npe() throws Exception { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); // 1st argument NPE try { asc.connect(null, null, connectCompletionHandler); fail(); } catch(IllegalArgumentException expected) {} // 3rd argument NPE try { asc.connect(ss.getLocalSocketAddress(), null, null); fail(); } catch(NullPointerException expected) {} asc.close(); ss.close(); } public void test_read_npe() throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); asc.connect(ss.getLocalSocketAddress(), null, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); // Read data from async channel ByteBuffer receivedData = createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); // 1st argument NPE try { asc.read(null, null, intCompletionHandler); fail(); } catch(NullPointerException expected) {} // 3rd argument NPE try { asc.read(receivedData, null, null); fail(); } catch(NullPointerException expected) {} // With timeout, 1st argument NPE try { asc.read(null, 100, TimeUnit.MILLISECONDS, null, intCompletionHandler); fail(); } catch(NullPointerException expected) {} // With timeout, 5rd argument NPE try { asc.read(receivedData, 100, TimeUnit.MILLISECONDS, null, null); fail(); } catch(NullPointerException expected) 
{} // Scatter read, 1st argument NPE try { asc.read(null, 0, 1, 0, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(NullPointerException expected) {} // Scatter read, 1st argument NPE try { asc.read(new ByteBuffer[]{null}, 0, 1, 0, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(NullPointerException expected) {} // Scatter read, last argument NPE try { asc.read(new ByteBuffer[]{receivedData}, 0, 1, 0, TimeUnit.MILLISECONDS, null, null); fail(); } catch(NullPointerException expected) {} asc.close(); ss.close(); } public void test_read_not_connected() throws Throwable { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); ByteBuffer receivedData = createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); try { asc.read(receivedData); fail(); } catch(NotYetConnectedException expected) {} try { asc.read(receivedData, null, intCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} try { asc.read(receivedData, 100, TimeUnit.MILLISECONDS, null, intCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} try { asc.read(new ByteBuffer[] {receivedData}, 0, 1, 100, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} asc.close(); } public void test_read_failures() throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); asc.connect(ss.getLocalSocketAddress(), null, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); ByteBuffer receivedData = createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new 
FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); ByteBuffer readOnly = receivedData.asReadOnlyBuffer(); // Read-only future read try { asc.read(readOnly); fail(); } catch(IllegalArgumentException expected) {} // Scatter-read read-only try { asc.read(new ByteBuffer[] {readOnly}, 0, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IllegalArgumentException expected) {} // Scatter-read bad offset try { asc.read(new ByteBuffer[] {receivedData}, -2, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} try { asc.read(new ByteBuffer[] {receivedData}, 3, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} // Scatter-read bad length try { asc.read(new ByteBuffer[] {receivedData}, 0, -1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} try { asc.read(new ByteBuffer[] {receivedData}, 0, 3, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} // Completion-handler read-only try { asc.read(readOnly, null, intCompletionHandler); fail(); } catch(IllegalArgumentException expected) {} try { asc.read(readOnly, 100L, TimeUnit.MILLISECONDS, null, intCompletionHandler); fail(); } catch(IllegalArgumentException expected) {} asc.close(); ss.close(); } public void test_write_npe() throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); asc.connect(ss.getLocalSocketAddress(), null, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); // Read data from async channel ByteBuffer receivedData = 
createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); // 1st argument NPE try { asc.write(null, null, intCompletionHandler); fail(); } catch(NullPointerException expected) {} // 3rd argument NPE try { asc.write(receivedData, null, null); fail(); } catch(NullPointerException expected) {} // With timeout, 1st argument NPE try { asc.write(null, 100, TimeUnit.MILLISECONDS, null, intCompletionHandler); fail(); } catch(NullPointerException expected) {} // With timeout, 5rd argument NPE try { asc.write(receivedData, 100, TimeUnit.MILLISECONDS, null, null); fail(); } catch(NullPointerException expected) {} // Scatter write, 1st argument NPE. try { asc.write((ByteBuffer[])null, 0, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(NullPointerException expected) {} // Scatter write, 1st argument NPE in array // Surprise, it doesn't throw (not symmetric with scatter read) asc.write(new ByteBuffer[]{null}, 0, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); // Scatter write, last argument NPE try { asc.write(new ByteBuffer[]{receivedData}, 0, 1, 0, TimeUnit.MILLISECONDS, null, null); fail(); } catch(NullPointerException expected) {} asc.close(); ss.close(); } public void test_write_not_connected() throws Throwable { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); ByteBuffer receivedData = createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); try { asc.write(receivedData); fail(); } catch(NotYetConnectedException expected) {} try { asc.write(receivedData, null, intCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} try { asc.write(receivedData, 100, 
TimeUnit.MILLISECONDS, null, intCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} try { asc.write(new ByteBuffer[] {receivedData}, 0, 1, 100, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(NotYetConnectedException expected) {} asc.close(); } public void test_write_failures() throws Throwable { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect FutureLikeCompletionHandler<Void> connectCompletionHandler = new FutureLikeCompletionHandler<>(); asc.connect(ss.getLocalSocketAddress(), null, connectCompletionHandler); connectCompletionHandler.get(1000); assertNotNull(asc.getRemoteAddress()); ByteBuffer receivedData = createTestByteBuffer(32, false); FutureLikeCompletionHandler<Integer> intCompletionHandler = new FutureLikeCompletionHandler<>(); FutureLikeCompletionHandler<Long> longCompletionHandler = new FutureLikeCompletionHandler<>(); // Scatter-write bad offset try { asc.write(new ByteBuffer[] {receivedData}, -2, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} try { asc.write(new ByteBuffer[] {receivedData}, 3, 1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} // Scatter-write bad length try { asc.write(new ByteBuffer[] {receivedData}, 0, -1, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} try { asc.write(new ByteBuffer[] {receivedData}, 0, 3, 100L, TimeUnit.MILLISECONDS, null, longCompletionHandler); fail(); } catch(IndexOutOfBoundsException expected) {} asc.close(); ss.close(); } /* b/264665118 public void test_shutdown() throws Exception { ServerSocket ss = new ServerSocket(0); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); // Connect Future<Void> connectFuture = asc.connect(ss.getLocalSocketAddress()); connectFuture.get(1000, 
TimeUnit.MILLISECONDS); assertNotNull(asc.getRemoteAddress()); // Accept & write data final int messageSize = NON_BLOCKING_MESSAGE_SIZE; ByteBuffer sendData = createTestByteBuffer(messageSize, false); Socket sss = ss.accept(); // Small message, won't block on write sss.getOutputStream().write(sendData.array()); // Shutdown input, expect -1 from read asc.shutdownInput(); ByteBuffer receivedData = createTestByteBuffer(messageSize, false); // We did write something into the socket, #shutdownInput javadocs // say that "...effect on an outstanding read operation is system dependent and // therefore not specified...". It looks like on android/linux the data in // received buffer is discarded. assertEquals(-1, (int)asc.read(receivedData).get(1000, TimeUnit.MILLISECONDS)); assertEquals(-1, (int)asc.read(receivedData).get(1000, TimeUnit.MILLISECONDS)); // But we can still write! assertEquals(32, (int)asc.write(sendData).get(1000, TimeUnit.MILLISECONDS)); byte[] readArray = new byte[32]; assertEquals(32, sss.getInputStream().read(readArray)); assertTrue(Arrays.equals(sendData.array(), readArray)); // Shutdown output, expect ClosedChannelException from write asc.shutdownOutput(); try { assertEquals(-1, (int)asc.write(sendData).get(1000, TimeUnit.MILLISECONDS)); fail(); } catch(ExecutionException expected) { assertTrue(expected.getCause() instanceof ClosedChannelException); } try { assertEquals(-1, (int)asc.write(sendData).get(1000, TimeUnit.MILLISECONDS)); fail(); } catch(ExecutionException expected) { assertTrue(expected.getCause() instanceof ClosedChannelException); } // shutdownInput() & shudownOutput() != closed, shocking! 
assertNotNull(asc.getRemoteAddress()); assertTrue(asc.isOpen()); asc.close(); sss.close(); ss.close(); } */ public void test_options() throws Exception { try (AsynchronousSocketChannel asc = AsynchronousSocketChannel.open()) { asc.setOption(StandardSocketOptions.SO_SNDBUF, 5000); assertEquals(5000, (long) asc.getOption(StandardSocketOptions.SO_SNDBUF)); asc.setOption(StandardSocketOptions.SO_RCVBUF, 5000); assertEquals(5000, (long) asc.getOption(StandardSocketOptions.SO_RCVBUF)); asc.setOption(StandardSocketOptions.SO_KEEPALIVE, true); assertTrue(asc.getOption(StandardSocketOptions.SO_KEEPALIVE)); asc.setOption(StandardSocketOptions.SO_REUSEADDR, true); assertTrue(asc.getOption(StandardSocketOptions.SO_REUSEADDR)); asc.setOption(StandardSocketOptions.TCP_NODELAY, true); assertTrue(asc.getOption(StandardSocketOptions.TCP_NODELAY)); } } public void test_options_iae() throws Exception { AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(); try { asc.setOption(StandardSocketOptions.IP_TOS, 5); fail(); } catch (UnsupportedOperationException expected) {} asc.close(); } public void test_supportedOptions() throws Throwable { AsynchronousSocketChannel assc = AsynchronousSocketChannel.open(); Set<SocketOption<?>> supportedOptions = assc.supportedOptions(); assertEquals(5, supportedOptions.size()); assertTrue(supportedOptions.contains(StandardSocketOptions.SO_REUSEADDR)); assertTrue(supportedOptions.contains(StandardSocketOptions.SO_RCVBUF)); assertTrue(supportedOptions.contains(StandardSocketOptions.SO_SNDBUF)); assertTrue(supportedOptions.contains(StandardSocketOptions.SO_KEEPALIVE)); assertTrue(supportedOptions.contains(StandardSocketOptions.TCP_NODELAY)); // supportedOptions should work after close according to spec assc.close(); supportedOptions = assc.supportedOptions(); assertEquals(5, supportedOptions.size()); } public void test_group() throws Exception { AsynchronousChannelProvider provider = AsynchronousChannelProvider.provider(); 
AsynchronousChannelGroup group = provider.openAsynchronousChannelGroup(2, Executors.defaultThreadFactory()); AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(group); assertEquals(provider, asc.provider()); asc.close(); } private static ByteBuffer createTestByteBuffer(int size, boolean isDirect) { return createTestByteBuffer(size, isDirect, 0); } private static ByteBuffer createTestByteBuffer(int size, boolean isDirect, int contentOffset) { ByteBuffer bb = isDirect ? ByteBuffer.allocateDirect(size) : ByteBuffer.allocate(size); for (int i = 0; i < size; ++i) { bb.put(i, (byte)(i + contentOffset)); } return bb; } /* J2ObjC removed: unsupported ResourceLeakageDetector public void test_closeGuardSupport() throws IOException { try (AsynchronousSocketChannel asc = AsynchronousSocketChannel.open()) { leakageDetectorRule.assertUnreleasedResourceCount(asc, 1); } } */ /* J2ObjC removed: unsupported ResourceLeakageDetector public void test_closeGuardSupport_group() throws IOException { AsynchronousChannelProvider provider = AsynchronousChannelProvider.provider(); AsynchronousChannelGroup group = provider.openAsynchronousChannelGroup(2, Executors.defaultThreadFactory()); try (AsynchronousSocketChannel asc = AsynchronousSocketChannel.open(group)) { leakageDetectorRule.assertUnreleasedResourceCount(asc, 1); } } */ }
oracle/graal
35,043
truffle/src/com.oracle.truffle.api.debug.test/src/com/oracle/truffle/api/debug/test/DebugStackFrameTest.java
/* * Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.truffle.api.debug.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.graalvm.polyglot.Source; import org.junit.Test; import com.oracle.truffle.api.CallTarget; import com.oracle.truffle.api.RootCallTarget; import com.oracle.truffle.api.Truffle; import com.oracle.truffle.api.TruffleLanguage; import com.oracle.truffle.api.TruffleStackTraceElement; import com.oracle.truffle.api.debug.DebugStackFrame; import com.oracle.truffle.api.debug.DebugStackTraceElement; import com.oracle.truffle.api.debug.DebugValue; import com.oracle.truffle.api.debug.DebuggerSession; import com.oracle.truffle.api.debug.SuspendedEvent; import com.oracle.truffle.api.frame.Frame; import com.oracle.truffle.api.frame.FrameInstance; import com.oracle.truffle.api.frame.FrameSlotTypeException; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.instrumentation.GenerateWrapper; import com.oracle.truffle.api.instrumentation.InstrumentableNode; import com.oracle.truffle.api.instrumentation.ProbeNode; import com.oracle.truffle.api.instrumentation.StandardTags; import com.oracle.truffle.api.instrumentation.Tag; import com.oracle.truffle.api.instrumentation.test.InstrumentationTestLanguage; import com.oracle.truffle.api.nodes.DirectCallNode; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.RootNode; import 
com.oracle.truffle.api.source.SourceSection; import com.oracle.truffle.api.test.polyglot.ProxyLanguage; public class DebugStackFrameTest extends AbstractDebugTest { @Test public void testEvalAndSideEffects() throws Throwable { final Source source = testSource("ROOT(DEFINE(a,ROOT( \n" + " VARIABLE(a, 42), \n" + " VARIABLE(b, 43), \n" + " VARIABLE(c, 44), \n" + " STATEMENT(),\n" + // will start stepping here " STATEMENT())\n" + "), \n" + "VARIABLE(a, 42), VARIABLE(b, 43), VARIABLE(c, 44), \n" + "CALL(a))\n"); try (DebuggerSession session = startSession()) { session.suspendNextExecution(); startEval(source); expectSuspended((SuspendedEvent event) -> { Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); // assert changes to the current frame DebugStackFrame frame = stackFrames.next(); assertDynamicFrame(frame); DebugValue aValue = frame.getScope().getDeclaredValue("a"); String aStringValue = aValue.toDisplayString(); // assert changes to a parent frame frame = stackFrames.next(); assertDynamicFrame(frame); // assign from one stack frame to another one frame.getScope().getDeclaredValue("a").set(aValue); assertEquals(aStringValue, frame.getScope().getDeclaredValue("a").toDisplayString()); event.prepareContinue(); }); expectDone(); } } private static void assertDynamicFrame(DebugStackFrame frame) { assertEquals("42", frame.getScope().getDeclaredValue("a").toDisplayString()); assertEquals("43", frame.getScope().getDeclaredValue("b").toDisplayString()); assertEquals("44", frame.getScope().getDeclaredValue("c").toDisplayString()); // dynamic value should now be accessible DebugValue dStackValue = frame.getScope().getDeclaredValue("d"); assertNull(dStackValue); // should change the dynamic value assertEquals("45", frame.eval("VARIABLE(d, 45)").toDisplayString()); dStackValue = frame.getScope().getDeclaredValue("d"); assertEquals("45", dStackValue.toDisplayString()); assertEquals("45", frame.getScope().getDeclaredValue("d").toDisplayString()); // change 
an existing value assertEquals("45", frame.eval("VARIABLE(c, 45)").toDisplayString()); assertEquals("45", frame.getScope().getDeclaredValue("c").toDisplayString()); // set an existing value using a constant expression DebugValue bValue = frame.getScope().getDeclaredValue("b"); frame.getScope().getDeclaredValue("b").set(frame.eval("CONSTANT(46)")); assertEquals("46", frame.getScope().getDeclaredValue("b").toDisplayString()); assertEquals("46", bValue.toDisplayString()); // set an existing value using a constant expression with side effect frame.getScope().getDeclaredValue("b").set(frame.eval("VARIABLE(a, 47)")); assertEquals("47", frame.getScope().getDeclaredValue("b").toDisplayString()); assertEquals("47", frame.getScope().getDeclaredValue("a").toDisplayString()); } @Test public void testFrameValidity() throws Throwable { final Source source = testSource("ROOT(\n" + " VARIABLE(a, 42), \n" + " VARIABLE(b, 43), \n" + " VARIABLE(c, 44), \n" + " STATEMENT(),\n" + " STATEMENT()\n" + ")\n"); try (DebuggerSession session = startSession()) { session.suspendNextExecution(); startEval(source); class SharedData { DebugStackFrame frame; DebugValue stackValueWithGetValue; DebugValue stackValueWithIterator; Iterator<DebugStackFrame> frameIterator2; DebugValue heapValue; } SharedData data = new SharedData(); expectSuspended((SuspendedEvent event) -> { data.frame = event.getTopStackFrame(); Iterator<DebugStackFrame> frameIterator = event.getStackFrames().iterator(); assertSame(data.frame, frameIterator.next()); assertFalse(frameIterator.hasNext()); checkStack(data.frame, "a", "42", "b", "43", "c", "44"); // values for verifying state checks data.frameIterator2 = event.getStackFrames().iterator(); data.stackValueWithGetValue = data.frame.getScope().getDeclaredValue("a"); data.stackValueWithIterator = data.frame.getScope().getDeclaredValues().iterator().next(); // should dynamically create a local variable data.heapValue = data.frame.eval("VARIABLE(d, 45)"); 
event.prepareStepInto(1); // should render all pointers invalid }); expectSuspended((SuspendedEvent event) -> { // next event everything should be invalidated except heap values assertInvalidFrame(data.frame); assertInvalidIterator(data.frameIterator2); assertInvalidDebugValue(data.stackValueWithGetValue); assertInvalidDebugValue(data.stackValueWithIterator); assertEquals("45", data.heapValue.toDisplayString()); assertFalse(data.heapValue.isWritable()); assertTrue(data.heapValue.isReadable()); }); expectDone(); } } @Test public void testSourceSections() { final Source source = testSource("ROOT(DEFINE(a,ROOT(\n" + " STATEMENT())\n" + "),\n" + "DEFINE(b,ROOT(\n" + " CALL(a))\n" + "), \n" + "CALL(b))\n"); try (DebuggerSession session = startSession()) { session.suspendNextExecution(); startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); SourceSection ss = frame.getSourceSection(); assertSection(ss, "STATEMENT()", 2, 3, 2, 13); SourceSection fss = getFunctionSourceSection(frame); assertSection(fss, "ROOT(\n STATEMENT())\n", 1, 15, 2, 15); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one frame = stackFrames.next(); // b ss = frame.getSourceSection(); assertSection(ss, "CALL(a)", 5, 3, 5, 9); fss = getFunctionSourceSection(frame); assertSection(fss, "ROOT(\n CALL(a))\n", 4, 10, 5, 11); frame = stackFrames.next(); // root ss = frame.getSourceSection(); assertSection(ss, "CALL(b)", 7, 1, 7, 7); fss = getFunctionSourceSection(frame); assertSection(fss, source.getCharacters().toString(), 1, 1, 7, 9); assertFalse(stackFrames.hasNext()); event.prepareContinue(); }); expectDone(); } } @Test public void testVariables() { final Source source = testSource("ROOT(DEFINE(a,ROOT(\n" + " VARIABLE(v1, 1), \n" + " STATEMENT())\n" + "),\n" + "DEFINE(b,ROOT(\n" + " VARIABLE(v2, 2), \n" + " CALL(a))\n" + "), \n" + "VARIABLE(v3, 3), \n" + 
"CALL(b))\n"); try (DebuggerSession session = startSession()) { session.suspendNextExecution(); startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals("a", frame.getName()); assertEquals("STATEMENT()", frame.getSourceSection().getCharacters()); checkStack(frame, "v1", "1"); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one frame = stackFrames.next(); assertEquals("b", frame.getName()); assertEquals("CALL(a)", frame.getSourceSection().getCharacters()); checkStack(frame, "v2", "2"); frame = stackFrames.next(); // root assertEquals("", frame.getName()); assertEquals("CALL(b)", frame.getSourceSection().getCharacters()); checkStack(frame, "v3", "3"); assertFalse(stackFrames.hasNext()); event.prepareContinue(); }); expectDone(); } } @Test public void testStackNodes() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, "Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals(3, frame.getSourceSection().getCharLength()); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one for (int d = TestStackLanguage.DEPTH; d > 0; d--) { assertTrue("Depth: " + d, stackFrames.hasNext()); frame = stackFrames.next(); assertSection(frame.getSourceSection(), "St", 1, 1, 1, 2); } assertFalse(stackFrames.hasNext()); }); } expectDone(); } @Test public void testAsynchronousStack() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, 
"Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { List<List<DebugStackTraceElement>> asynchronousStacks = event.getAsynchronousStacks(); assertEquals(TestStackLanguage.DEPTH, asynchronousStacks.size()); for (int depth = 0; depth < TestStackLanguage.DEPTH; depth++) { List<DebugStackTraceElement> stack = asynchronousStacks.get(depth); assertEquals(TestStackLanguage.DEPTH - depth, stack.size()); } try { asynchronousStacks.get(TestStackLanguage.DEPTH); fail("Expected IndexOutOfBoundsException."); } catch (IndexOutOfBoundsException ex) { // O.K. } }); } expectDone(); } @Test public void testDynamicNames() { TestExecutableNamesLanguage language = new TestExecutableNamesLanguage(0, "staticName", "dynamicName1"); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, ""); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals("dynamicName1", frame.getName()); }); } expectDone(); } @Test public void testDynamicNamesInDepth() { for (int depth = 0; depth < 5; depth++) { checkDynamicNames(depth, "staticName"); checkDynamicNames(depth, "staticName", "dynamicName"); checkDynamicNames(depth, "staticName", "dynamicName1", "dynamicName2"); } } private void checkDynamicNames(int depth, String rootName, String... 
executableNames) { TestExecutableNamesLanguage language = new TestExecutableNamesLanguage(depth, rootName, executableNames); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, depth + rootName + Arrays.toString(executableNames)); tester.startEval(source); if (executableNames.length == 0) { expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals("depth = " + depth, rootName, frame.getName()); }); } else { for (String executableName : executableNames) { final String name = executableName; expectSuspended((SuspendedEvent event) -> { Iterator<DebugStackFrame> framesIterator = event.getStackFrames().iterator(); for (int d = depth; d > 0; d--) { framesIterator.next(); } DebugStackFrame frame = framesIterator.next(); assertEquals("depth = " + depth, name, frame.getName()); session.suspendNextExecution(); }); } } } expectDone(); } private static SourceSection getFunctionSourceSection(DebugStackFrame frame) { // There are only function scopes in the InstrumentationTestLanguage assertTrue(frame.getScope().isFunctionScope()); return frame.getScope().getSourceSection(); } private static void assertSection(SourceSection ss, String code, int startLine, int startColumn, int endLine, int endcolumn) { assertEquals(code, ss.getCharacters()); assertEquals("startLine", startLine, ss.getStartLine()); assertEquals("startColumn", startColumn, ss.getStartColumn()); assertEquals("endLine", endLine, ss.getEndLine()); assertEquals("endColumn", endcolumn, ss.getEndColumn()); } private static void assertInvalidDebugValue(DebugValue value) { try { value.toDisplayString(); fail(); } catch (IllegalStateException s) { } try { value.set(value); fail(); } catch (IllegalStateException s) { } try { value.isReadable(); } catch (IllegalStateException s) { } try { value.isWritable(); fail(); } catch (IllegalStateException s) { } 
value.getName(); // Name is known } private static void assertInvalidIterator(Iterator<DebugStackFrame> iterator) { try { iterator.hasNext(); fail(); } catch (IllegalStateException s) { } try { iterator.next(); fail(); } catch (IllegalStateException s) { } } private static void assertInvalidFrame(DebugStackFrame frame) { try { frame.eval("STATEMENT"); fail(); } catch (IllegalStateException s) { } try { frame.getName(); fail(); } catch (IllegalStateException s) { } try { frame.getSourceSection(); fail(); } catch (IllegalStateException s) { } try { frame.getScope().getDeclaredValue("d"); fail(); } catch (IllegalStateException s) { } try { frame.isInternal(); fail(); } catch (IllegalStateException s) { } try { frame.getScope().getDeclaredValues().iterator(); fail(); } catch (IllegalStateException s) { } } @Test public void testRawNodes() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, "Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals(TestStackLanguage.TestStackRootNode.class, frame.getRawNode(ProxyLanguage.class).getRootNode().getClass()); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one for (int d = TestStackLanguage.DEPTH; d > 0; d--) { assertTrue("Depth: " + d, stackFrames.hasNext()); frame = stackFrames.next(); assertEquals(TestStackLanguage.TestStackRootNode.class, frame.getRawNode(ProxyLanguage.class).getRootNode().getClass()); } assertFalse(stackFrames.hasNext()); }); } expectDone(); } @Test public void testRawNodesRestricted() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); 
Source source = Source.create(ProxyLanguage.ID, "Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertEquals(null, frame.getRawNode(InstrumentationTestLanguage.class)); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one for (int d = TestStackLanguage.DEPTH; d > 0; d--) { assertTrue("Depth: " + d, stackFrames.hasNext()); frame = stackFrames.next(); assertEquals(null, frame.getRawNode(InstrumentationTestLanguage.class)); } assertFalse(stackFrames.hasNext()); }); } expectDone(); } @Test public void testRawFrame() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, "Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); assertNotNull(frame.getRawFrame(ProxyLanguage.class, FrameInstance.FrameAccess.READ_WRITE)); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one for (int d = TestStackLanguage.DEPTH; d > 0; d--) { assertTrue("Depth: " + d, stackFrames.hasNext()); frame = stackFrames.next(); assertNotNull(frame.getRawFrame(ProxyLanguage.class, FrameInstance.FrameAccess.READ_WRITE)); } assertFalse(stackFrames.hasNext()); }); } expectDone(); } @Test public void testRawFrameRestricted() { TestStackLanguage language = new TestStackLanguage(); ProxyLanguage.setDelegate(language); try (DebuggerSession session = tester.startSession()) { session.suspendNextExecution(); Source source = Source.create(ProxyLanguage.ID, "Stack Test"); tester.startEval(source); expectSuspended((SuspendedEvent event) -> { DebugStackFrame frame = event.getTopStackFrame(); 
assertNull(frame.getRawFrame(InstrumentationTestLanguage.class, FrameInstance.FrameAccess.READ_WRITE)); Iterator<DebugStackFrame> stackFrames = event.getStackFrames().iterator(); assertEquals(frame, stackFrames.next()); // The top one for (int d = TestStackLanguage.DEPTH; d > 0; d--) { assertTrue("Depth: " + d, stackFrames.hasNext()); frame = stackFrames.next(); assertNull(frame.getRawFrame(InstrumentationTestLanguage.class, FrameInstance.FrameAccess.READ_WRITE)); } assertFalse(stackFrames.hasNext()); }); } expectDone(); } static final class TestStackLanguage extends ProxyLanguage { private static final int DEPTH = 5; TestStackLanguage() { } @Override protected CallTarget parse(TruffleLanguage.ParsingRequest request) throws Exception { com.oracle.truffle.api.source.Source source = request.getSource(); return new TestStackRootNode(languageInstance, source, DEPTH).getCallTarget(); } private static final class TestStackRootNode extends RootNode { @Node.Child private TestNode child; private final TruffleLanguage<?> language; private final String name; private final SourceSection rootSection; private final int depth; private final int entryCall = getFrameDescriptor().findOrAddAuxiliarySlot("entryCall"); TestStackRootNode(TruffleLanguage<?> language, com.oracle.truffle.api.source.Source parsedSource, int depth) { super(language); this.language = language; this.depth = depth; rootSection = parsedSource.createSection(1); name = "Test Stack"; child = createTestNodes(); insert(child); } @Override public String getName() { return name; } @Override public SourceSection getSourceSection() { return rootSection; } @Override public Object execute(VirtualFrame frame) { frame.setAuxiliarySlot(entryCall, DEPTH == depth); return child.execute(frame); } @Override protected boolean isInstrumentable() { return true; } @Override protected List<TruffleStackTraceElement> findAsynchronousFrames(Frame frame) { if (depth == 0) { return null; } boolean isEntryCall; try { isEntryCall = (boolean) 
frame.getAuxiliarySlot(entryCall); } catch (FrameSlotTypeException ex) { return null; } if (!isEntryCall) { return null; } List<TruffleStackTraceElement> asyncStack = new ArrayList<>(depth); TestStackRootNode asyncRoot = new TestStackRootNode(language, rootSection.getSource(), depth - 1); do { RootCallTarget callTarget = asyncRoot.getCallTarget(); TestNode leaf = asyncRoot.child; while (leaf.testChild != null) { leaf = leaf.testChild; } DirectCallNode callNode = leaf.getCallNode(); Frame asyncFrame; if (asyncRoot.depth == depth - 1) { asyncFrame = Truffle.getRuntime().createMaterializedFrame(new Object[]{}, asyncRoot.getFrameDescriptor()); asyncFrame.setAuxiliarySlot(entryCall, true); } else { asyncFrame = null; } TruffleStackTraceElement element = TruffleStackTraceElement.create(leaf, callTarget, asyncFrame); asyncStack.add(0, element); if (callNode == null) { break; } asyncRoot = (TestStackRootNode) ((RootCallTarget) callNode.getCallTarget()).getRootNode(); } while (true); return asyncStack; } private TestNode createTestNodes() { TestNode node; if (depth > 0) { RootCallTarget callTarget = new TestStackRootNode(language, rootSection.getSource(), depth - 1).getCallTarget(); DirectCallNode callNode = Truffle.getRuntime().createDirectCallNode(callTarget); if (depth % 2 == 0) { node = new TestNode() { @Child private DirectCallNode call = insert(callNode); @Override public Object execute(VirtualFrame frame) { return call.call(); } @Override protected DirectCallNode getCallNode() { return call; } }; } else { node = new TestInstrumentableNode() { @Child private DirectCallNode call = insert(callNode); @Override public SourceSection getSourceSection() { return rootSection.getSource().createUnavailableSection(); } @Override public Object execute(VirtualFrame frame) { return call.call(); } @Override protected DirectCallNode getCallNode() { return call; } }; } } else { node = new TestInstrumentableNode() { @Override public SourceSection getSourceSection() { return 
rootSection.getSource().createSection(0, 3); } @Override public boolean hasTag(Class<? extends Tag> tag) { return StandardTags.StatementTag.class == tag; } }; } List<TestNode> nodes = new ArrayList<>(); // A non-instrumentable node with a SourceSection nodes.add(new TestNode() { @Override public SourceSection getSourceSection() { return rootSection.getSource().createSection(0, 1); } }); // A non-instrumentable node nodes.add(new TestNode()); // An instrumentable node that says is not instrumentable // and does not have SourceSection nodes.add(new TestInstrumentableNode() { @Override public boolean isInstrumentable() { return false; } }); // An instrumentable node with unavailable SourceSection nodes.add(new TestInstrumentableNode() { @Override public SourceSection getSourceSection() { return rootSection.getSource().createUnavailableSection(); } }); // An instrumentable node that says is not instrumentable // and has a SourceSection nodes.add(new TestInstrumentableNode() { @Override public boolean isInstrumentable() { return false; } @Override public SourceSection getSourceSection() { return rootSection.getSource().createSection(0, 1); } }); // An instrumentable node with a SourceSection nodes.add(new TestInstrumentableNode() { @Override public SourceSection getSourceSection() { return rootSection.getSource().createSection(0, 2); } }); // RootTag so that it's recognized as a guest code execution nodes.add(new TestInstrumentableNode() { @Override public SourceSection getSourceSection() { return rootSection.getSource().createSection(1); } @Override public boolean hasTag(Class<? 
extends Tag> tag) { return StandardTags.RootTag.class == tag || StandardTags.RootBodyTag.class == tag; } }); TestNode lastNode = node; for (TestNode n : nodes) { n.testChild = lastNode; lastNode = n; } return lastNode; } } @GenerateWrapper static class TestInstrumentableNode extends TestNode implements InstrumentableNode { @Override public boolean isInstrumentable() { return true; } @Override public WrapperNode createWrapper(ProbeNode probe) { return new TestInstrumentableNodeWrapper(this, probe); } } private static class TestNode extends Node { @Node.Child TestNode testChild; public Object execute(VirtualFrame frame) { if (testChild != null) { return testChild.execute(frame); } else { return 42; } } protected DirectCallNode getCallNode() { return null; } } } }
google/closure-compiler
34,320
test/com/google/javascript/jscomp/ProcessDefinesTest.java
/* * Copyright 2007 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.javascript.jscomp.parsing.parser.FeatureSet; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * @author nicksantos@google.com (Nick Santos) */ @RunWith(JUnit4.class) public final class ProcessDefinesTest extends CompilerTestCase { public ProcessDefinesTest() { super(DEFAULT_EXTERNS + "var externMethod;"); } private final Map<String, Node> overrides = new HashMap<>(); private GlobalNamespace namespace; private ProcessDefines.Mode mode; private boolean recognizeClosureDefines = true; private boolean enableJ2clPasses = false; @Override @Before public void setUp() throws Exception { super.setUp(); overrides.clear(); mode = ProcessDefines.Mode.CHECK_AND_OPTIMIZE; // ProcessDefines emits warnings if the user tries to re-define a constant, // but the constant is not defined anywhere in the binary. 
allowSourcelessWarnings(); } @Override protected CompilerPass getProcessor(Compiler compiler) { if (enableJ2clPasses) { J2clSourceFileChecker.markToRunJ2clPasses(compiler); } return new ProcessDefinesWithInjectedNamespace(compiler); } private FeatureSet outputFeatureSet = FeatureSet.ES2017; private static final Pattern ZONE_INPUT_PATTERN = Pattern.compile(".*/packages/zone.js/.*\\.js"); @Override protected CompilerOptions getOptions() { CompilerOptions options = super.getOptions(); options.setOutputFeatureSet(outputFeatureSet); options.setEnableZonesDefineName("javascript.angular2.ENABLE_ZONES"); options.setZoneInputPattern(ZONE_INPUT_PATTERN); return options; } @Override protected int getNumRepetitions() { // Only do one repetition, so that we can make sure the first pass keeps // GlobalNamespace up to date. return 1; } /** * Helper for tests that expects definitions to remain unchanged, such that {@code definitions+js} * is converted to {@code definitions+expected}. */ private void testWithPrefix(String definitions, String js, String expected) { test(definitions + js, definitions + expected); } @Test public void testBasicDefine1() { test("/** @define {boolean} */ var DEF = true", "/** @define {boolean} */ var DEF=true"); } @Test public void testBasicDefine2() { test("/** @define {string} */ var DEF = 'a'", "/** @define {string} */ var DEF=\"a\""); } @Test public void testBasicDefine3() { test("/** @define {number} */ var DEF = 0", "/** @define {number} */ var DEF=0"); } @Test public void testDefineBadType() { test( srcs("/** @define {Object} */ var DEF = {}"), error(ProcessDefines.INVALID_DEFINE_TYPE), error(ProcessDefines.INVALID_DEFINE_VALUE)); } @Test public void testDefineBadType_nullishCoalesce() { test(srcs("/** @define {string} */ var DEF = 'a' ?? 'b'")); testError("/** @define {string} */ var DEF = 'a' ?? 
null", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testChecksOnlyProducesErrors() { mode = ProcessDefines.Mode.CHECK; test( srcs("/** @define {Object} */ var DEF = {}"), error(ProcessDefines.INVALID_DEFINE_TYPE), error(ProcessDefines.INVALID_DEFINE_VALUE)); } @Test public void testUnknownDefineWarning() { mode = ProcessDefines.Mode.OPTIMIZE; overrides.put("a.B", new Node(Token.TRUE)); test("var a = {};", "var a = {};", warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); } @Test public void testDefineWithBadValue1() { testError( "/** @define {boolean} */ var DEF = new Boolean(true);", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithBadValue2() { testError("/** @define {string} */ var DEF = 'x' + y;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithBadValue3() { // alias is not const testError( "let x = 'x'; /** @define {string} */ var DEF = x;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithBadValue4() { testError("/** @define {string} */ var DEF = null;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithBadValue5() { testError("/** @define {string} */ var DEF = undefined;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithBadValue6() { testError("/** @define {string} */ var DEF = NaN;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testGoogDefineWithBadValue() { testError( "/** @define {boolean} */ var DEF = goog.define('DEF_XYZ', new Boolean(true));", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineWithLet() { testError( "/** @define {boolean} */ let DEF = new Boolean(true);", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineInExterns() { testSame(externs(DEFAULT_EXTERNS + "/** @define {boolean} */ var EXTERN_DEF;"), srcs("")); } @Test public void testDefineInExternsPlusUsage() { testSame( externs(DEFAULT_EXTERNS + "/** @define {boolean} */ var EXTERN_DEF;"), srcs("/** @define 
{boolean} */ var DEF = EXTERN_DEF")); } @Test public void testNonDefineInExternsPlusUsage() { testError( externs(DEFAULT_EXTERNS + "/** @const {boolean} */ var EXTERN_NON_DEF;"), srcs("/** @define {boolean} */ var DEF = EXTERN_NON_DEF"), error(ProcessDefines.INVALID_DEFINE_VALUE)); } @Test public void testDefineCompiledInExterns() { testSame(externs(DEFAULT_EXTERNS + "/** @define {boolean} */ var COMPILED;"), srcs("")); } @Test public void testDefineWithDependentValue() { test( """ /** @define {boolean} */ var BASE = false; /** @define {boolean} */ var DEF = !BASE; """, """ /** @define {boolean} */ var BASE = false; /** @define {boolean} */ var DEF = !BASE """); test( """ var a = {}; /** @define {boolean} */ a.BASE = false; /** @define {boolean} */ a.DEF = !a.BASE; """, """ var a={}; /** @define {boolean} */ a.BASE = false; /** @define {boolean} */ a.DEF = !a.BASE """); } @Test public void testDefineWithInvalidDependentValue() { testError( """ var BASE = false; /** @define {boolean} */ var DEF = !BASE; """, ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testOverriding1() { overrides.put("DEF_OVERRIDE_TO_TRUE", new Node(Token.TRUE)); overrides.put("DEF_OVERRIDE_TO_FALSE", new Node(Token.FALSE)); test( """ /** @define {boolean} */ var DEF_OVERRIDE_TO_TRUE = false; /** @define {boolean} */ var DEF_OVERRIDE_TO_FALSE = true """, """ /** @define {boolean} */ var DEF_OVERRIDE_TO_TRUE = true; /** @define {boolean} */var DEF_OVERRIDE_TO_FALSE=false """); } @Test public void testOverriding2() { overrides.put("DEF_OVERRIDE_TO_TRUE", new Node(Token.TRUE)); String normalConst = "var DEF_OVERRIDE_TO_FALSE=true;"; testWithPrefix( normalConst, "/** @define {boolean} */ var DEF_OVERRIDE_TO_TRUE = false", "/** @define {boolean} */ var DEF_OVERRIDE_TO_TRUE = true"); } @Test public void testOverriding3() { overrides.put("DEF_OVERRIDE_TO_TRUE", new Node(Token.TRUE)); test( "/** @define {boolean} */ var DEF_OVERRIDE_TO_TRUE = true;", "/** @define {boolean} */ var 
DEF_OVERRIDE_TO_TRUE = true"); } @Test public void testOverridingString0() { test( "/** @define {string} */ var DEF_OVERRIDE_STRING = 'x';", "/** @define {string} */ var DEF_OVERRIDE_STRING=\"x\""); } @Test public void testOverridingString1() { test( "/** @define {string} */ var DEF_OVERRIDE_STRING = 'x' + 'y';", "/** @define {string} */ var DEF_OVERRIDE_STRING=\"x\" + \"y\""); } @Test public void testOverridingString2() { overrides.put("DEF_OVERRIDE_STRING", Node.newString("foo")); test( "/** @define {string} */ var DEF_OVERRIDE_STRING = 'x';", "/** @define {string} */ var DEF_OVERRIDE_STRING=\"foo\""); } @Test public void testOverridingString3() { overrides.put("DEF_OVERRIDE_STRING", Node.newString("foo")); test( "/** @define {string} */ var DEF_OVERRIDE_STRING = 'x' + 'y';", "/** @define {string} */ var DEF_OVERRIDE_STRING=\"foo\""); } @Test public void testMisspelledOverride() { overrides.put("DEF_BAD_OVERIDE", new Node(Token.TRUE)); // NOTYPO: Intentional misspelling. test( "/** @define {boolean} */ var DEF_BAD_OVERRIDE = true", "/** @define {boolean} */ var DEF_BAD_OVERRIDE = true", warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); } @Test public void testCompiledIsKnownDefine() { overrides.put("COMPILED", new Node(Token.TRUE)); testSame(""); } @Test public void testSimpleReassign1() { test( srcs( """ /** @define {boolean} */ var DEF = false; DEF = true; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testSimpleReassign2() { test( srcs( """ /** @define {number|boolean} */ var DEF=false; DEF=true; DEF=3 """), error(ProcessDefines.NON_CONST_DEFINE), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testSimpleReassign3() { test( srcs( """ /** @define {boolean} */ var DEF = false; var x; x = DEF = true; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testDefineAssignedToSimpleAlias() { testSame( """ const x = true; const ALIAS = x; /** @define {boolean} */ const DEF2 = ALIAS; """); } @Test public void 
testDefineAssignedToNonConstAlias() { testError( """ let X = true; X = false; /** @define {boolean} */ const DEF2 = X; """, ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineAssignedToEnumAlias() { testError( """ /** @enum {string} */ const E = {A: 'a'}; /** @define {string} */ const DEF2 = E.A; """, // TODO(sdh): It would be nice if this worked, but doesn't seem worth implementing. ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineAssignedToDefineAlias() { overrides.put("DEF2", new Node(Token.TRUE)); test( """ /** @define {boolean} */ const DEF1 = false; const ALIAS = DEF1; /** @define {boolean} */ const DEF2 = ALIAS; """, """ /** @define {boolean} */ const DEF1 = false; const ALIAS = DEF1; /** @define {boolean} */ const DEF2 = true; """); } @Test public void testDefineAssignedToQualifiedNameAlias() { overrides.put("DEF1", new Node(Token.TRUE)); test( """ const ns = {}; /** @define {boolean} */ const DEF1 = false; /** @const */ ns.ALIAS = DEF1; /** @define {boolean} */ const DEF2 = ns.ALIAS; """, """ const ns = {}; /** @define {boolean} */ const DEF1 = true; /** @const */ ns.ALIAS = DEF1; /** @define {boolean} */ const DEF2 = ns.ALIAS; """); } @Test public void testDefineAssignedToNonconstDefineAlias() { testError( """ /** @define {boolean} */ const DEF1 = false; var ALIAS = DEF1; /** @define {boolean} */ const DEF2 = ALIAS; """, ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testDefineAssignedToNonconstQualifiedNameAlias() { testError( """ const ns = {}; /** @define {boolean} */ const DEF1 = false; ns.ALIAS = DEF1; /** @define {boolean} */ const DEF2 = ns.ALIAS; """, ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testAssignBeforeDeclaration_var() { test( srcs("DEF=false;var b=false,/** @define {boolean} */DEF=true,c=false"), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testAssignBeforeDeclaration_const() { test( srcs("DEF=false;const b=false,/** @define {boolean} */DEF=true,c=false"), 
error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testAssignBeforeDeclaration_var_withOverride() { overrides.put("DEF_OVERRIDE_TO_TRUE", new Node(Token.TRUE)); test( srcs( """ DEF_OVERRIDE_TO_TRUE = 3; /** @define {boolean|number} */ var DEF_OVERRIDE_TO_TRUE = false; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testEmptyDeclaration() { testError("/** @define {boolean} */ var DEF;", ProcessDefines.INVALID_DEFINE_VALUE); } @Test public void testReassignAfterCall() { testError( "/** @define {boolean} */var DEF=true;externMethod();DEF=false", ProcessDefines.NON_CONST_DEFINE); } @Test public void testReassignAfterRef() { testError( "/** @define {boolean} */var DEF=true;var x = DEF;DEF=false", ProcessDefines.NON_CONST_DEFINE); } @Test public void testReassignWithExpr() { test( srcs("/** @define {boolean} */var DEF=true;var x;DEF=x=false"), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testReassignAfterRefInConditional() { testError( "/** @define {boolean} */var DEF=true; if (false) {var x=DEF} DEF=false;", ProcessDefines.NON_CONST_DEFINE); } @Test public void testAssignInFunctionScope() { testError( "/** @define {boolean} */var DEF=true;function foo() {DEF=false};", ProcessDefines.NON_CONST_DEFINE); } @Test public void testDeclareInFunctionScope() { testError( "function foo() {/** @define {boolean} */var DEF=true;};", ProcessDefines.INVALID_DEFINE_LOCATION); } @Test public void testDeclareInFunctionScope_withOtherSet() { test( srcs( """ var DEF = 0; function foo() { /** @define {boolean} */ DEF=true; }; """), error(ProcessDefines.INVALID_DEFINE_LOCATION)); } @Test public void testDeclareInBlockScope() { testError( "{ /** @define {boolean} */ const DEF=true; };", // ProcessDefines.INVALID_DEFINE_LOCATION); } @Test public void testDeclareInClassStaticBlock() { testError( "class C {static {/** @define {boolean} */ const DEF=true;}}", ProcessDefines.INVALID_DEFINE_LOCATION); } @Test public void testDefineAssignmentInLoop() { 
testError( "/** @define {boolean} */var DEF=true;var x=0;while (x) {DEF=false;}", ProcessDefines.NON_CONST_DEFINE); } @Test public void testWithNoDefines() { testSame("var DEF=true;var x={};x.foo={}"); } @Test public void testNamespacedDefine1() { test( srcs("var a = {}; /** @define {boolean} */ a.B = false; a.B = true;"), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testNamespacedDefine2a() { overrides.put("a.B", new Node(Token.TRUE)); test( "var a = {}; /** @define {boolean} */ a.B = false;", "var a = {}; /** @define {boolean} */ a.B = true;"); } @Test public void testNamespacedDefine2b() { overrides.put("a.B", new Node(Token.TRUE)); testError( "var a = { /** @define {boolean} */ B : false };", // ProcessDefines.INVALID_DEFINE_LOCATION); } @Test public void testNamespacedDefine2c() { overrides.put("a.B", new Node(Token.TRUE)); testError( "var a = { /** @define {boolean} */ get B() { return false } };", ProcessDefines.INVALID_DEFINE_LOCATION); } @Test public void testNamespacedDefine3() { overrides.put("a.B", new Node(Token.TRUE)); test("var a = {};", "var a = {};", warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); } @Test public void testNamespacedDefine4() { overrides.put("a.B", new Node(Token.TRUE)); test( "var a = {}; /** @define {boolean} */ a.B = false;", "var a = {}; /** @define {boolean} */ a.B = true;"); } @Test public void testGoogDefine_notOverridden() { test( "/** @define {boolean} */ const B = goog.define('a.B', false);", "/** @define {boolean} */ const B = false;"); } @Test public void testGoogDefine_overridden() { overrides.put("a.B", new Node(Token.TRUE)); test( "/** @define {boolean} */ const B = goog.define('a.B', false);", "/** @define {boolean} */ const B = true;"); } @Test public void testGoogDefineAllowedFormats_notOverridden() { String jsdoc = "/** @define {number} */\n"; test(jsdoc + "var name = goog.define('name', 1);", jsdoc + "var name = 1"); test(jsdoc + "var name = goog.define('otherName', 1);", jsdoc + "var name = 1"); 
test(jsdoc + "const name = goog.define('name', 1);", jsdoc + "const name = 1"); test( "const ns = {};\n" + jsdoc + "ns.name = goog.define('ns.name', 1);", "const ns = {};\n" + jsdoc + "ns.name = 1;"); } @Test public void testGoogDefine_invalidCallFormats() { mode = ProcessDefines.Mode.CHECK; String jsdoc = "/** @define {number} */\n"; testError("const name = goog.define('name', 1);", ProcessDefines.MISSING_DEFINE_ANNOTATION); testError( "const name = {};\n" + jsdoc + "name.two = goog.define('name.2', 1);", ProcessDefines.INVALID_DEFINE_NAME_ERROR); testError(jsdoc + "const x = goog.define();", ClosurePrimitiveErrors.NULL_ARGUMENT_ERROR); testError( jsdoc + "const value = goog.define('value');", ClosurePrimitiveErrors.NULL_ARGUMENT_ERROR); testError( jsdoc + "const five = goog.define(5);", ClosurePrimitiveErrors.INVALID_ARGUMENT_ERROR); testError( jsdoc + "const five = goog.define('FOO', 5, 6);", ClosurePrimitiveErrors.TOO_MANY_ARGUMENTS_ERROR); testError( jsdoc + "const templateName = goog.define(`templateName`, 1);", ClosurePrimitiveErrors.INVALID_ARGUMENT_ERROR); testError( jsdoc + "const templateName = goog.define(`${template}Name`, 1);", ClosurePrimitiveErrors.INVALID_ARGUMENT_ERROR); } @Test public void testGoogDefine_invalidCallLocation() { mode = ProcessDefines.Mode.CHECK; test(srcs("goog.define('name', 1);"), error(ProcessDefines.DEFINE_CALL_WITHOUT_ASSIGNMENT)); test( srcs("/** @define {number} */ goog.define('name', 1);"), error(ProcessDefines.DEFINE_CALL_WITHOUT_ASSIGNMENT), error(ProcessDefines.INVALID_DEFINE_LOCATION)); testError( "var x = x || goog.define('goog.DEBUG', true);", ProcessDefines.DEFINE_CALL_WITHOUT_ASSIGNMENT); testError( "function f() { const debug = goog.define('goog.DEBUG', true); }", ClosurePrimitiveErrors.INVALID_CLOSURE_CALL_SCOPE_ERROR); } @Test public void testGoogDefine_enabledByRecognizeClosureDefines() { mode = ProcessDefines.Mode.CHECK; test(srcs("goog.define('name', 1);"), 
error(ProcessDefines.DEFINE_CALL_WITHOUT_ASSIGNMENT)); this.recognizeClosureDefines = false; testSame("goog.define('name', 1);"); } @Test public void testOverrideAfterAlias() { testError( "var x; /** @define {boolean} */var DEF=true; x=DEF; DEF=false;", ProcessDefines.NON_CONST_DEFINE); } @Test public void testBasicConstDeclaration() { test("/** @define {boolean} */ const DEF = true", "/** @define {boolean} */ const DEF=true"); test("/** @define {string} */ const DEF = 'a'", "/** @define {string} */ const DEF=\"a\""); test("/** @define {number} */ const DEF = 0", "/** @define {number} */ const DEF=0"); } @Test public void testConstOverriding1() { overrides.put("DEF_OVERRIDE_TO_TRUE", new Node(Token.TRUE)); test( "/** @define {boolean} */ const DEF_OVERRIDE_TO_TRUE = false;", "/** @define {boolean} */ const DEF_OVERRIDE_TO_TRUE = true;"); } @Test public void testConstOverriding2() { test( "/** @define {string} */ const DEF_OVERRIDE_STRING = 'x';", "/** @define {string} */ const DEF_OVERRIDE_STRING=\"x\""); } @Test public void testConstProducesUnknownDefineWarning() { mode = ProcessDefines.Mode.OPTIMIZE; overrides.put("a.B", new Node(Token.TRUE)); test("const a = {};", "const a = {};", warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); } @Test public void testSimpleConstReassign() { testError( """ /** @define {boolean} */ const DEF = false; DEF = true; """, ProcessDefines.NON_CONST_DEFINE); } @Test public void testRedeclaration_twoGoogDefine_differentLocalNames() { test( srcs( """ /** @define {boolean} */ const A = goog.define('a.B', false); /** @define {boolean} */ const B = goog.define('a.B', false); """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testRedeclaration_oneGoogDefine_varWithGoogDefineName() { test( srcs( """ /** @define {boolean} */ const A = goog.define('B', false); const B = false; """), expected( """ /** @define {boolean} */ const A = false; const B = false; """)); } @Test public void 
testRedeclaration_oneGoogDefine_varWithSameLocalName() { test( srcs( """ /** @define {boolean} */ var A = goog.define('B', false); var A = false; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testRedeclaration_oneGoogDefine_oneAtDefine() { test( srcs( """ /** @define {boolean} */ const A = goog.define('B', false); /** @define {boolean} */ const B = false; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testRedeclaration_oneAtDefine_varWithSameName() { test( srcs( """ /** @define {boolean} */ var A = false; var A = false; """), error(ProcessDefines.NON_CONST_DEFINE)); } @Test public void testClosureDefineValues_replacements() { mode = ProcessDefines.Mode.CHECK_AND_OPTIMIZE; test( """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const FOO = 'original'; """, """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const FOO = 'closureDefault'; """); test( """ var CLOSURE_DEFINES = CLOSURE_DEFINES || {}; CLOSURE_DEFINES['FOO'] = 'closureDefault'; /** @define {string} */ const FOO = 'original'; """, """ var CLOSURE_DEFINES = CLOSURE_DEFINES || {}; CLOSURE_DEFINES['FOO'] = 'closureDefault'; /** @define {string} */ const FOO = 'closureDefault'; """); test( """ var CLOSURE_DEFINES = {'FOO': true}; /** @define {boolean} */ const FOO = false; """, """ var CLOSURE_DEFINES = {'FOO': true}; /** @define {boolean} */ const FOO = true; """); test( """ var CLOSURE_DEFINES = {'FOO': false}; /** @define {boolean} */ const FOO = false; """, """ var CLOSURE_DEFINES = {'FOO': false}; /** @define {boolean} */ const FOO = false; """); test( """ var CLOSURE_DEFINES = {'FOO': 1}; /** @define {number} */ const FOO = 2; """, """ var CLOSURE_DEFINES = {'FOO': 1}; /** @define {number} */ const FOO = 1; """); test( """ var CLOSURE_DEFINES = {'FOO': 0xABCD}; /** @define {number} */ const FOO = 2; """, """ var CLOSURE_DEFINES = {'FOO': 0xABCD}; /** @define {number} */ const FOO = 0xABCD; """); } @Test public 
void testClosureDefineValues_duplicateKey() { mode = ProcessDefines.Mode.CHECK_AND_OPTIMIZE; test( srcs( """ var CLOSURE_DEFINES = CLOSURE_DEFINES || {}; CLOSURE_DEFINES['FOO'] = 'firstVersionIgnored'; CLOSURE_DEFINES['FOO'] = 'closureDefault'; /** @define {string} */ const FOO = 'original'; """), error(ProcessDefines.CLOSURE_DEFINES_MULTIPLE)); test( srcs( """ var CLOSURE_DEFINES = {'FOO': 'firstVersionIgnored'}; CLOSURE_DEFINES['FOO'] = 'closureDefault'; /** @define {string} */ const FOO = 'original'; """), error(ProcessDefines.CLOSURE_DEFINES_MULTIPLE)); } @Test public void testClosureDefineValues_namespacedReplacement() { test( """ var CLOSURE_DEFINES = {'a.b': 'closureDefault'}; const a = {}; /** @define {string} */ a.b = 'original'; """, """ var CLOSURE_DEFINES = {'a.b': 'closureDefault'}; const a = {}; /** @define {string} */ a.b = 'closureDefault'; """); } @Test public void testClosureDefineValues_replacementWithGoogDefine() { mode = ProcessDefines.Mode.CHECK_AND_OPTIMIZE; test( """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const f = goog.define('FOO', 'original'); """, """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const f = 'closureDefault'; """); } @Test public void testClosureDefineValues_replacementWithOverriddenDefine() { // Command-line flag takes precedence over CLOSURE_DEFINES. 
mode = ProcessDefines.Mode.CHECK_AND_OPTIMIZE; overrides.put("FOO", IR.string("override")); test( """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const f = goog.define('FOO', 'original'); """, """ var CLOSURE_DEFINES = {'FOO': 'closureDefault'}; /** @define {string} */ const f = 'override'; """); } @Test public void testClosureDefineValues_checkOnlyDoesntModifyAst() { mode = ProcessDefines.Mode.CHECK; testSame( """ var CLOSURE_DEFINES = {'FOO': 'string'}; /** @define {string} */ const f = goog.define('FOO', 'tmp'); """); } @Test public void testClosureDefines_unknownDefineErrors() { mode = ProcessDefines.Mode.OPTIMIZE; test(srcs("var CLOSURE_DEFINES = {'FOO': 0};"), warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); test(srcs("CLOSURE_DEFINES['FOO'] = 0;"), warning(ProcessDefines.UNKNOWN_DEFINE_WARNING)); } @Test public void testClosureDefines_mustBeGlobal() { mode = ProcessDefines.Mode.CHECK; test( srcs( """ (function() { var CLOSURE_DEFINES = {'FOO': 0}; })(); /** @define {number} */ const FOO = 1; """), error(ProcessDefines.NON_GLOBAL_CLOSURE_DEFINES_ERROR)); test( srcs( """ if (cond) { var CLOSURE_DEFINES = {'FOO': 0}; } /** @define {number} */ const FOO = 1; """), error(ProcessDefines.NON_GLOBAL_CLOSURE_DEFINES_ERROR)); test( srcs( """ if (cond) { CLOSURE_DEFINES['FOO'] = 0; } /** @define {number} */ const FOO = 1; """), error(ProcessDefines.NON_GLOBAL_CLOSURE_DEFINES_ERROR)); } @Test public void testClosureDefines_valuesErrors() { mode = ProcessDefines.Mode.CHECK; testError("var CLOSURE_DEFINES = {'FOO': a};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("var CLOSURE_DEFINES = {'FOO': 0+1};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError( "var CLOSURE_DEFINES = {'FOO': 'value' + 'value'};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("var CLOSURE_DEFINES = {'FOO': !true};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("var CLOSURE_DEFINES = {'FOO': -true};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("var 
CLOSURE_DEFINES = {SHORTHAND};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError( "var CLOSURE_DEFINES = {'TEMPLATE': `template`};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError( "var CLOSURE_DEFINES = {'TEMPLATE': `${template}Sub`};", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("CLOSURE_DEFINES[notStringLiteral] = 42;", ProcessDefines.CLOSURE_DEFINES_ERROR); testError("CLOSURE_DEFINES['FOO'] = a;", ProcessDefines.CLOSURE_DEFINES_ERROR); } @Test public void testClosureDefinesErrors_enabledByRecognizeClosureDefines() { mode = ProcessDefines.Mode.CHECK; testError("var CLOSURE_DEFINES = {'FOO': a};", ProcessDefines.CLOSURE_DEFINES_ERROR); this.recognizeClosureDefines = false; testSame("var CLOSURE_DEFINES = {'FOO': a};"); } @Test public void testGenerateGlobalAliases() { enableJ2clPasses = true; test( """ var a = {}; /** @define {number} */ a.b = goog.define('a.b', 1); """, """ var a = {}; /** @define {number} */ a.b = 1; var jscomp$defines$a$b = a.b; """); test( """ var a = {}; /** @define {number} */ a.b = goog.define('c.d', 1); """, """ var a = {}; /** @define {number} */ a.b = 1; var jscomp$defines$c$d = a.b; """); test( "/** @define {number} */ var a = goog.define('c.d', 1);", """ /** @define {number} */ var a = 1; var jscomp$defines$c$d = a; """); test( """ /** @define {number} */ var a = goog.define('a', 1); /** @define {number} */ var b = goog.define('b', a); """, """ /** @define {number} */ var a = 1; var jscomp$defines$a = a; /** @define {number} */ var b = a; var jscomp$defines$b = b; """); } @Test public void testEs2017OrGreaterFailsWhenCompilingZone() { outputFeatureSet = FeatureSet.ES2017; testError( srcs( SourceFile.fromCode( "third_party/javascript/angular2/rc/packages/zone.js/lib/zone.closure.js", """ /** @define {boolean} */ const ENABLE_ZONES = goog.define('javascript.angular2.ENABLE_ZONES', true) """)), ProcessDefines.ZONE_NOT_SUPPORTED_WITH_NATIVE_ASYNC_AWAIT); } @Test public void 
testEs2017OrGreaterAllowedWhenCompilingZoneWithItDisabled() { outputFeatureSet = FeatureSet.ES2017; test( srcs( SourceFile.fromCode( "defines.js", "var CLOSURE_DEFINES = {'javascript.angular2.ENABLE_ZONES': false};"), SourceFile.fromCode( "third_party/javascript/angular2/rc/packages/zone.js/lib/zone.closure.js", """ /** @define {boolean} */ const ENABLE_ZONES = goog.define('javascript.angular2.ENABLE_ZONES', true) """))); } @Test public void testEs2017OrGreaterFailsWhenCompilingZoneWithItExplicitlyEnabled() { outputFeatureSet = FeatureSet.ES2017; testError( srcs( SourceFile.fromCode( "defines.js", "var CLOSURE_DEFINES = {'javascript.angular2.ENABLE_ZONES': true};"), SourceFile.fromCode( "third_party/javascript/angular2/rc/packages/zone.js/lib/zone.closure.js", """ /** @define {boolean} */ const ENABLE_ZONES = goog.define('javascript.angular2.ENABLE_ZONES', true) """)), ProcessDefines.ZONE_NOT_SUPPORTED_WITH_NATIVE_ASYNC_AWAIT); } @Test public void testZoneAllowedWhenCompilingLessThanEs2017() { outputFeatureSet = FeatureSet.ES2016; test( srcs( SourceFile.fromCode( "third_party/javascript/angular2/rc/packages/zone.js/lib/zone.closure.js", ""))); } private class ProcessDefinesWithInjectedNamespace implements CompilerPass { private final Compiler compiler; public ProcessDefinesWithInjectedNamespace(Compiler compiler) { this.compiler = compiler; } @Override public void process(Node externs, Node js) { namespace = new GlobalNamespace(compiler, externs, js); new ProcessDefines.Builder(compiler) .putReplacements(overrides) .setMode(mode) .injectNamespace(() -> namespace) .setRecognizeClosureDefines(recognizeClosureDefines) .setEnableZonesDefineName(compiler.getOptions().getEnableZonesDefineName()) .setZoneInputPattern(compiler.getOptions().getZoneInputPattern()) .build() .process(externs, js); } } }
googleapis/google-cloud-java
35,006
java-notebooks/proto-google-cloud-notebooks-v2/src/main/java/com/google/cloud/notebooks/v2/BootDisk.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/notebooks/v2/gce_setup.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.notebooks.v2; /** * * * <pre> * The definition of a boot disk. * </pre> * * Protobuf type {@code google.cloud.notebooks.v2.BootDisk} */ public final class BootDisk extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.notebooks.v2.BootDisk) BootDiskOrBuilder { private static final long serialVersionUID = 0L; // Use BootDisk.newBuilder() to construct. 
private BootDisk(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BootDisk() { diskType_ = 0; diskEncryption_ = 0; kmsKey_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BootDisk(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v2.GceSetupProto .internal_static_google_cloud_notebooks_v2_BootDisk_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v2.GceSetupProto .internal_static_google_cloud_notebooks_v2_BootDisk_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v2.BootDisk.class, com.google.cloud.notebooks.v2.BootDisk.Builder.class); } public static final int DISK_SIZE_GB_FIELD_NUMBER = 1; private long diskSizeGb_ = 0L; /** * * * <pre> * Optional. The size of the boot disk in GB attached to this instance, up to * a maximum of 64000 GB (64 TB). If not specified, this defaults to the * recommended value of 150GB. * </pre> * * <code>int64 disk_size_gb = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The diskSizeGb. */ @java.lang.Override public long getDiskSizeGb() { return diskSizeGb_; } public static final int DISK_TYPE_FIELD_NUMBER = 2; private int diskType_ = 0; /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for diskType. */ @java.lang.Override public int getDiskTypeValue() { return diskType_; } /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The diskType. 
*/ @java.lang.Override public com.google.cloud.notebooks.v2.DiskType getDiskType() { com.google.cloud.notebooks.v2.DiskType result = com.google.cloud.notebooks.v2.DiskType.forNumber(diskType_); return result == null ? com.google.cloud.notebooks.v2.DiskType.UNRECOGNIZED : result; } public static final int DISK_ENCRYPTION_FIELD_NUMBER = 3; private int diskEncryption_ = 0; /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for diskEncryption. */ @java.lang.Override public int getDiskEncryptionValue() { return diskEncryption_; } /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The diskEncryption. */ @java.lang.Override public com.google.cloud.notebooks.v2.DiskEncryption getDiskEncryption() { com.google.cloud.notebooks.v2.DiskEncryption result = com.google.cloud.notebooks.v2.DiskEncryption.forNumber(diskEncryption_); return result == null ? com.google.cloud.notebooks.v2.DiskEncryption.UNRECOGNIZED : result; } public static final int KMS_KEY_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object kmsKey_ = ""; /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. 
* </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The kmsKey. */ @java.lang.Override public java.lang.String getKmsKey() { java.lang.Object ref = kmsKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); kmsKey_ = s; return s; } } /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The bytes for kmsKey. */ @java.lang.Override public com.google.protobuf.ByteString getKmsKeyBytes() { java.lang.Object ref = kmsKey_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); kmsKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (diskSizeGb_ != 0L) { output.writeInt64(1, diskSizeGb_); } if (diskType_ != com.google.cloud.notebooks.v2.DiskType.DISK_TYPE_UNSPECIFIED.getNumber()) { output.writeEnum(2, diskType_); } if (diskEncryption_ != com.google.cloud.notebooks.v2.DiskEncryption.DISK_ENCRYPTION_UNSPECIFIED.getNumber()) { output.writeEnum(3, diskEncryption_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKey_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, kmsKey_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (diskSizeGb_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, diskSizeGb_); } if (diskType_ != com.google.cloud.notebooks.v2.DiskType.DISK_TYPE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, diskType_); } if (diskEncryption_ != com.google.cloud.notebooks.v2.DiskEncryption.DISK_ENCRYPTION_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, diskEncryption_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKey_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, kmsKey_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.notebooks.v2.BootDisk)) { return super.equals(obj); } com.google.cloud.notebooks.v2.BootDisk other = (com.google.cloud.notebooks.v2.BootDisk) obj; if (getDiskSizeGb() != other.getDiskSizeGb()) return false; if (diskType_ != other.diskType_) return false; if (diskEncryption_ != other.diskEncryption_) return false; if (!getKmsKey().equals(other.getKmsKey())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISK_SIZE_GB_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getDiskSizeGb()); hash = (37 * hash) + DISK_TYPE_FIELD_NUMBER; hash = (53 * hash) + diskType_; hash = (37 * hash) + 
DISK_ENCRYPTION_FIELD_NUMBER; hash = (53 * hash) + diskEncryption_; hash = (37 * hash) + KMS_KEY_FIELD_NUMBER; hash = (53 * hash) + getKmsKey().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.notebooks.v2.BootDisk parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v2.BootDisk parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v2.BootDisk parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v2.BootDisk parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.notebooks.v2.BootDisk prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The definition of a boot disk. 
* </pre> * * Protobuf type {@code google.cloud.notebooks.v2.BootDisk} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v2.BootDisk) com.google.cloud.notebooks.v2.BootDiskOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v2.GceSetupProto .internal_static_google_cloud_notebooks_v2_BootDisk_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v2.GceSetupProto .internal_static_google_cloud_notebooks_v2_BootDisk_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v2.BootDisk.class, com.google.cloud.notebooks.v2.BootDisk.Builder.class); } // Construct using com.google.cloud.notebooks.v2.BootDisk.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; diskSizeGb_ = 0L; diskType_ = 0; diskEncryption_ = 0; kmsKey_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.notebooks.v2.GceSetupProto .internal_static_google_cloud_notebooks_v2_BootDisk_descriptor; } @java.lang.Override public com.google.cloud.notebooks.v2.BootDisk getDefaultInstanceForType() { return com.google.cloud.notebooks.v2.BootDisk.getDefaultInstance(); } @java.lang.Override public com.google.cloud.notebooks.v2.BootDisk build() { com.google.cloud.notebooks.v2.BootDisk result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.notebooks.v2.BootDisk buildPartial() { com.google.cloud.notebooks.v2.BootDisk result = new 
com.google.cloud.notebooks.v2.BootDisk(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.notebooks.v2.BootDisk result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.diskSizeGb_ = diskSizeGb_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.diskType_ = diskType_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.diskEncryption_ = diskEncryption_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.kmsKey_ = kmsKey_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.notebooks.v2.BootDisk) { return mergeFrom((com.google.cloud.notebooks.v2.BootDisk) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.notebooks.v2.BootDisk other) { if (other == com.google.cloud.notebooks.v2.BootDisk.getDefaultInstance()) return this; if (other.getDiskSizeGb() != 0L) { setDiskSizeGb(other.getDiskSizeGb()); } if (other.diskType_ != 0) { 
setDiskTypeValue(other.getDiskTypeValue()); } if (other.diskEncryption_ != 0) { setDiskEncryptionValue(other.getDiskEncryptionValue()); } if (!other.getKmsKey().isEmpty()) { kmsKey_ = other.kmsKey_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { diskSizeGb_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { diskType_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { diskEncryption_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { kmsKey_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long diskSizeGb_; /** * * * <pre> * Optional. The size of the boot disk in GB attached to this instance, up to * a maximum of 64000 GB (64 TB). If not specified, this defaults to the * recommended value of 150GB. * </pre> * * <code>int64 disk_size_gb = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The diskSizeGb. */ @java.lang.Override public long getDiskSizeGb() { return diskSizeGb_; } /** * * * <pre> * Optional. 
The size of the boot disk in GB attached to this instance, up to * a maximum of 64000 GB (64 TB). If not specified, this defaults to the * recommended value of 150GB. * </pre> * * <code>int64 disk_size_gb = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The diskSizeGb to set. * @return This builder for chaining. */ public Builder setDiskSizeGb(long value) { diskSizeGb_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The size of the boot disk in GB attached to this instance, up to * a maximum of 64000 GB (64 TB). If not specified, this defaults to the * recommended value of 150GB. * </pre> * * <code>int64 disk_size_gb = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearDiskSizeGb() { bitField0_ = (bitField0_ & ~0x00000001); diskSizeGb_ = 0L; onChanged(); return this; } private int diskType_ = 0; /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for diskType. */ @java.lang.Override public int getDiskTypeValue() { return diskType_; } /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for diskType to set. * @return This builder for chaining. */ public Builder setDiskTypeValue(int value) { diskType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The diskType. 
*/ @java.lang.Override public com.google.cloud.notebooks.v2.DiskType getDiskType() { com.google.cloud.notebooks.v2.DiskType result = com.google.cloud.notebooks.v2.DiskType.forNumber(diskType_); return result == null ? com.google.cloud.notebooks.v2.DiskType.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The diskType to set. * @return This builder for chaining. */ public Builder setDiskType(com.google.cloud.notebooks.v2.DiskType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; diskType_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Indicates the type of the disk. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskType disk_type = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearDiskType() { bitField0_ = (bitField0_ & ~0x00000002); diskType_ = 0; onChanged(); return this; } private int diskEncryption_ = 0; /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for diskEncryption. */ @java.lang.Override public int getDiskEncryptionValue() { return diskEncryption_; } /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for diskEncryption to set. 
* @return This builder for chaining. */ public Builder setDiskEncryptionValue(int value) { diskEncryption_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The diskEncryption. */ @java.lang.Override public com.google.cloud.notebooks.v2.DiskEncryption getDiskEncryption() { com.google.cloud.notebooks.v2.DiskEncryption result = com.google.cloud.notebooks.v2.DiskEncryption.forNumber(diskEncryption_); return result == null ? com.google.cloud.notebooks.v2.DiskEncryption.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The diskEncryption to set. * @return This builder for chaining. */ public Builder setDiskEncryption(com.google.cloud.notebooks.v2.DiskEncryption value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; diskEncryption_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Input only. Disk encryption method used on the boot and data * disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v2.DiskEncryption disk_encryption = 3 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearDiskEncryption() { bitField0_ = (bitField0_ & ~0x00000004); diskEncryption_ = 0; onChanged(); return this; } private java.lang.Object kmsKey_ = ""; /** * * * <pre> * Optional. Input only. 
The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The kmsKey. */ public java.lang.String getKmsKey() { java.lang.Object ref = kmsKey_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); kmsKey_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The bytes for kmsKey. */ public com.google.protobuf.ByteString getKmsKeyBytes() { java.lang.Object ref = kmsKey_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); kmsKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The kmsKey to set. * @return This builder for chaining. 
*/ public Builder setKmsKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } kmsKey_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearKmsKey() { kmsKey_ = getDefaultInstance().getKmsKey(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Input only. The KMS key used to encrypt the disks, only * applicable if disk_encryption is CMEK. Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about using your own encryption keys. * </pre> * * <code> * string kms_key = 4 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The bytes for kmsKey to set. * @return This builder for chaining. 
*/ public Builder setKmsKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); kmsKey_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v2.BootDisk) } // @@protoc_insertion_point(class_scope:google.cloud.notebooks.v2.BootDisk) private static final com.google.cloud.notebooks.v2.BootDisk DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.notebooks.v2.BootDisk(); } public static com.google.cloud.notebooks.v2.BootDisk getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BootDisk> PARSER = new com.google.protobuf.AbstractParser<BootDisk>() { @java.lang.Override public BootDisk parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BootDisk> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BootDisk> 
getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.notebooks.v2.BootDisk getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,118
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/AdGroupCriterionErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/errors/ad_group_criterion_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.errors; /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum} */ public final class AdGroupCriterionErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) AdGroupCriterionErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionErrorEnum.newBuilder() to construct. private AdGroupCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.Builder.class); } /** * <pre> * Enum describing possible ad group criterion errors. 
* </pre> * * Protobuf enum {@code google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError} */ public enum AdGroupCriterionError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST(2), /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS(3), /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION(4), /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ TOO_MANY_OPERATIONS(5), /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ CANT_UPDATE_NEGATIVE(6), /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ CONCRETE_TYPE_REQUIRED(7), /** * <pre> * Bid is incompatible with ad group's bidding settings. * </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ BID_INCOMPATIBLE_WITH_ADGROUP(8), /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ CANNOT_TARGET_AND_EXCLUDE(9), /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ ILLEGAL_URL(10), /** * <pre> * Keyword text was invalid. 
* </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ INVALID_KEYWORD_TEXT(11), /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ INVALID_DESTINATION_URL(12), /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ MISSING_DESTINATION_URL_TAG(13), /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM(14), /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ INVALID_USER_STATUS(15), /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ CANNOT_ADD_CRITERIA_TYPE(16), /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. * </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ CANNOT_EXCLUDE_CRITERIA_TYPE(17), /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE(27), /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. 
* </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS(28), /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS(29), /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ CANNOT_SET_WITHOUT_FINAL_URLS(30), /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST(31), /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST(32), /** * <pre> * Cannot clear final urls if tracking url template exists. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS(33), /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST(34), /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS(35), /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE(36), /** * <pre> * Final urls are not supported for this criterion type. 
* </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(37), /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(38), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ public static final int AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST_VALUE = 2; /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ public static final int AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS_VALUE = 3; /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ public static final int CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION_VALUE = 4; /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ public static final int TOO_MANY_OPERATIONS_VALUE = 5; /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ public static final int CANT_UPDATE_NEGATIVE_VALUE = 6; /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ public static final int CONCRETE_TYPE_REQUIRED_VALUE = 7; /** * <pre> * Bid is incompatible with ad group's bidding settings. 
* </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ public static final int BID_INCOMPATIBLE_WITH_ADGROUP_VALUE = 8; /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 9; /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ public static final int ILLEGAL_URL_VALUE = 10; /** * <pre> * Keyword text was invalid. * </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ public static final int INVALID_KEYWORD_TEXT_VALUE = 11; /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ public static final int INVALID_DESTINATION_URL_VALUE = 12; /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ public static final int MISSING_DESTINATION_URL_TAG_VALUE = 13; /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ public static final int KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM_VALUE = 14; /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ public static final int INVALID_USER_STATUS_VALUE = 15; /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ public static final int CANNOT_ADD_CRITERIA_TYPE_VALUE = 16; /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. 
* </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 17; /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ public static final int CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE_VALUE = 27; /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. * </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ public static final int OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS_VALUE = 28; /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ public static final int CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS_VALUE = 29; /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ public static final int CANNOT_SET_WITHOUT_FINAL_URLS_VALUE = 30; /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST_VALUE = 31; /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST_VALUE = 32; /** * <pre> * Cannot clear final urls if tracking url template exists. 
* </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS_VALUE = 33; /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST_VALUE = 34; /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS_VALUE = 35; /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE_VALUE = 36; /** * <pre> * Final urls are not supported for this criterion type. * </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ public static final int FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 37; /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ public static final int FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 38; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AdGroupCriterionError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. */ public static AdGroupCriterionError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST; case 3: return AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS; case 4: return CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION; case 5: return TOO_MANY_OPERATIONS; case 6: return CANT_UPDATE_NEGATIVE; case 7: return CONCRETE_TYPE_REQUIRED; case 8: return BID_INCOMPATIBLE_WITH_ADGROUP; case 9: return CANNOT_TARGET_AND_EXCLUDE; case 10: return ILLEGAL_URL; case 11: return INVALID_KEYWORD_TEXT; case 12: return INVALID_DESTINATION_URL; case 13: return MISSING_DESTINATION_URL_TAG; case 14: return KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM; case 15: return INVALID_USER_STATUS; case 16: return CANNOT_ADD_CRITERIA_TYPE; case 17: return CANNOT_EXCLUDE_CRITERIA_TYPE; case 27: return CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE; case 28: return OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS; case 29: return CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS; case 30: return CANNOT_SET_WITHOUT_FINAL_URLS; case 31: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST; case 32: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST; case 33: return CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS; case 34: return CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST; case 35: return CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS; case 36: return CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE; case 37: return FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; case 38: return FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< AdGroupCriterionError> internalValueMap = new 
com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError>() { public AdGroupCriterionError findValueByNumber(int number) { return AdGroupCriterionError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final AdGroupCriterionError[] VALUES = values(); public static AdGroupCriterionError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private AdGroupCriterionError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum other = (com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v19_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum build() { com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum buildPartial() { com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum result = new com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) { return mergeFrom((com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum other) { if (other == com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum) private static final com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum(); } public 
static com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionErrorEnum> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionErrorEnum>() { @java.lang.Override public AdGroupCriterionErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,118
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/errors/AdGroupCriterionErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/errors/ad_group_criterion_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.errors; /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum} */ public final class AdGroupCriterionErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) AdGroupCriterionErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionErrorEnum.newBuilder() to construct. private AdGroupCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.Builder.class); } /** * <pre> * Enum describing possible ad group criterion errors. 
* </pre> * * Protobuf enum {@code google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError} */ public enum AdGroupCriterionError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST(2), /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS(3), /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION(4), /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ TOO_MANY_OPERATIONS(5), /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ CANT_UPDATE_NEGATIVE(6), /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ CONCRETE_TYPE_REQUIRED(7), /** * <pre> * Bid is incompatible with ad group's bidding settings. * </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ BID_INCOMPATIBLE_WITH_ADGROUP(8), /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ CANNOT_TARGET_AND_EXCLUDE(9), /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ ILLEGAL_URL(10), /** * <pre> * Keyword text was invalid. 
* </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ INVALID_KEYWORD_TEXT(11), /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ INVALID_DESTINATION_URL(12), /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ MISSING_DESTINATION_URL_TAG(13), /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM(14), /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ INVALID_USER_STATUS(15), /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ CANNOT_ADD_CRITERIA_TYPE(16), /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. * </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ CANNOT_EXCLUDE_CRITERIA_TYPE(17), /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE(27), /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. 
* </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS(28), /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS(29), /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ CANNOT_SET_WITHOUT_FINAL_URLS(30), /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST(31), /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST(32), /** * <pre> * Cannot clear final urls if tracking url template exists. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS(33), /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST(34), /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS(35), /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE(36), /** * <pre> * Final urls are not supported for this criterion type. 
* </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(37), /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(38), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ public static final int AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST_VALUE = 2; /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ public static final int AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS_VALUE = 3; /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ public static final int CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION_VALUE = 4; /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ public static final int TOO_MANY_OPERATIONS_VALUE = 5; /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ public static final int CANT_UPDATE_NEGATIVE_VALUE = 6; /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ public static final int CONCRETE_TYPE_REQUIRED_VALUE = 7; /** * <pre> * Bid is incompatible with ad group's bidding settings. 
* </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ public static final int BID_INCOMPATIBLE_WITH_ADGROUP_VALUE = 8; /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 9; /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ public static final int ILLEGAL_URL_VALUE = 10; /** * <pre> * Keyword text was invalid. * </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ public static final int INVALID_KEYWORD_TEXT_VALUE = 11; /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ public static final int INVALID_DESTINATION_URL_VALUE = 12; /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ public static final int MISSING_DESTINATION_URL_TAG_VALUE = 13; /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ public static final int KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM_VALUE = 14; /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ public static final int INVALID_USER_STATUS_VALUE = 15; /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ public static final int CANNOT_ADD_CRITERIA_TYPE_VALUE = 16; /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. 
* </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 17; /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ public static final int CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE_VALUE = 27; /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. * </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ public static final int OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS_VALUE = 28; /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ public static final int CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS_VALUE = 29; /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ public static final int CANNOT_SET_WITHOUT_FINAL_URLS_VALUE = 30; /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST_VALUE = 31; /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST_VALUE = 32; /** * <pre> * Cannot clear final urls if tracking url template exists. 
* </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS_VALUE = 33; /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST_VALUE = 34; /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS_VALUE = 35; /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE_VALUE = 36; /** * <pre> * Final urls are not supported for this criterion type. * </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ public static final int FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 37; /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ public static final int FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 38; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AdGroupCriterionError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. */ public static AdGroupCriterionError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST; case 3: return AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS; case 4: return CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION; case 5: return TOO_MANY_OPERATIONS; case 6: return CANT_UPDATE_NEGATIVE; case 7: return CONCRETE_TYPE_REQUIRED; case 8: return BID_INCOMPATIBLE_WITH_ADGROUP; case 9: return CANNOT_TARGET_AND_EXCLUDE; case 10: return ILLEGAL_URL; case 11: return INVALID_KEYWORD_TEXT; case 12: return INVALID_DESTINATION_URL; case 13: return MISSING_DESTINATION_URL_TAG; case 14: return KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM; case 15: return INVALID_USER_STATUS; case 16: return CANNOT_ADD_CRITERIA_TYPE; case 17: return CANNOT_EXCLUDE_CRITERIA_TYPE; case 27: return CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE; case 28: return OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS; case 29: return CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS; case 30: return CANNOT_SET_WITHOUT_FINAL_URLS; case 31: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST; case 32: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST; case 33: return CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS; case 34: return CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST; case 35: return CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS; case 36: return CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE; case 37: return FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; case 38: return FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< AdGroupCriterionError> internalValueMap = new 
com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError>() { public AdGroupCriterionError findValueByNumber(int number) { return AdGroupCriterionError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final AdGroupCriterionError[] VALUES = values(); public static AdGroupCriterionError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private AdGroupCriterionError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum other = (com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v20_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum build() { com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum buildPartial() { com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum result = new com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) { return mergeFrom((com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum other) { if (other == com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum) private static final com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum(); } public 
static com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionErrorEnum> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionErrorEnum>() { @java.lang.Override public AdGroupCriterionErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,118
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/AdGroupCriterionErrorEnum.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/errors/ad_group_criterion_error.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.errors; /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum} */ public final class AdGroupCriterionErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) AdGroupCriterionErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionErrorEnum.newBuilder() to construct. private AdGroupCriterionErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionErrorEnum(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.Builder.class); } /** * <pre> * Enum describing possible ad group criterion errors. 
* </pre> * * Protobuf enum {@code google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError} */ public enum AdGroupCriterionError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST(2), /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS(3), /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION(4), /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ TOO_MANY_OPERATIONS(5), /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ CANT_UPDATE_NEGATIVE(6), /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ CONCRETE_TYPE_REQUIRED(7), /** * <pre> * Bid is incompatible with ad group's bidding settings. * </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ BID_INCOMPATIBLE_WITH_ADGROUP(8), /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ CANNOT_TARGET_AND_EXCLUDE(9), /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ ILLEGAL_URL(10), /** * <pre> * Keyword text was invalid. 
* </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ INVALID_KEYWORD_TEXT(11), /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ INVALID_DESTINATION_URL(12), /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ MISSING_DESTINATION_URL_TAG(13), /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM(14), /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ INVALID_USER_STATUS(15), /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ CANNOT_ADD_CRITERIA_TYPE(16), /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. * </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ CANNOT_EXCLUDE_CRITERIA_TYPE(17), /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE(27), /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. 
* </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS(28), /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS(29), /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ CANNOT_SET_WITHOUT_FINAL_URLS(30), /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST(31), /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST(32), /** * <pre> * Cannot clear final urls if tracking url template exists. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS(33), /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST(34), /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS(35), /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE(36), /** * <pre> * Final urls are not supported for this criterion type. 
* </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(37), /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE(38), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * No link found between the AdGroupCriterion and the label. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST = 2;</code> */ public static final int AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST_VALUE = 2; /** * <pre> * The label has already been attached to the AdGroupCriterion. * </pre> * * <code>AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS = 3;</code> */ public static final int AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS_VALUE = 3; /** * <pre> * Negative AdGroupCriterion cannot have labels. * </pre> * * <code>CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION = 4;</code> */ public static final int CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION_VALUE = 4; /** * <pre> * Too many operations for a single call. * </pre> * * <code>TOO_MANY_OPERATIONS = 5;</code> */ public static final int TOO_MANY_OPERATIONS_VALUE = 5; /** * <pre> * Negative ad group criteria are not updateable. * </pre> * * <code>CANT_UPDATE_NEGATIVE = 6;</code> */ public static final int CANT_UPDATE_NEGATIVE_VALUE = 6; /** * <pre> * Concrete type of criterion (keyword v.s. placement) is required for ADD * and SET operations. * </pre> * * <code>CONCRETE_TYPE_REQUIRED = 7;</code> */ public static final int CONCRETE_TYPE_REQUIRED_VALUE = 7; /** * <pre> * Bid is incompatible with ad group's bidding settings. 
* </pre> * * <code>BID_INCOMPATIBLE_WITH_ADGROUP = 8;</code> */ public static final int BID_INCOMPATIBLE_WITH_ADGROUP_VALUE = 8; /** * <pre> * Cannot target and exclude the same criterion at once. * </pre> * * <code>CANNOT_TARGET_AND_EXCLUDE = 9;</code> */ public static final int CANNOT_TARGET_AND_EXCLUDE_VALUE = 9; /** * <pre> * The URL of a placement is invalid. * </pre> * * <code>ILLEGAL_URL = 10;</code> */ public static final int ILLEGAL_URL_VALUE = 10; /** * <pre> * Keyword text was invalid. * </pre> * * <code>INVALID_KEYWORD_TEXT = 11;</code> */ public static final int INVALID_KEYWORD_TEXT_VALUE = 11; /** * <pre> * Destination URL was invalid. * </pre> * * <code>INVALID_DESTINATION_URL = 12;</code> */ public static final int INVALID_DESTINATION_URL_VALUE = 12; /** * <pre> * The destination url must contain at least one tag (for example, {lpurl}) * </pre> * * <code>MISSING_DESTINATION_URL_TAG = 13;</code> */ public static final int MISSING_DESTINATION_URL_TAG_VALUE = 13; /** * <pre> * Keyword-level cpm bid is not supported * </pre> * * <code>KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM = 14;</code> */ public static final int KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM_VALUE = 14; /** * <pre> * For example, cannot add a biddable ad group criterion that had been * removed. * </pre> * * <code>INVALID_USER_STATUS = 15;</code> */ public static final int INVALID_USER_STATUS_VALUE = 15; /** * <pre> * Criteria type cannot be targeted for the ad group. Either the account is * restricted to keywords only, the criteria type is incompatible with the * campaign's bidding strategy, or the criteria type can only be applied to * campaigns. * </pre> * * <code>CANNOT_ADD_CRITERIA_TYPE = 16;</code> */ public static final int CANNOT_ADD_CRITERIA_TYPE_VALUE = 16; /** * <pre> * Criteria type cannot be excluded for the ad group. Refer to the * documentation for a specific criterion to check if it is excludable. 
* </pre> * * <code>CANNOT_EXCLUDE_CRITERIA_TYPE = 17;</code> */ public static final int CANNOT_EXCLUDE_CRITERIA_TYPE_VALUE = 17; /** * <pre> * Partial failure is not supported for shopping campaign mutate operations. * </pre> * * <code>CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE = 27;</code> */ public static final int CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE_VALUE = 27; /** * <pre> * Operations in the mutate request changes too many shopping ad groups. * Split requests for multiple shopping ad groups across multiple * requests. * </pre> * * <code>OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS = 28;</code> */ public static final int OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS_VALUE = 28; /** * <pre> * Not allowed to modify url fields of an ad group criterion if there are * duplicate elements for that ad group criterion in the request. * </pre> * * <code>CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS = 29;</code> */ public static final int CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS_VALUE = 29; /** * <pre> * Cannot set url fields without also setting final urls. * </pre> * * <code>CANNOT_SET_WITHOUT_FINAL_URLS = 30;</code> */ public static final int CANNOT_SET_WITHOUT_FINAL_URLS_VALUE = 30; /** * <pre> * Cannot clear final urls if final mobile urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST = 31;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST_VALUE = 31; /** * <pre> * Cannot clear final urls if final app urls exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST = 32;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST_VALUE = 32; /** * <pre> * Cannot clear final urls if tracking url template exists. 
* </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS = 33;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS_VALUE = 33; /** * <pre> * Cannot clear final urls if url custom parameters exist. * </pre> * * <code>CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST = 34;</code> */ public static final int CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST_VALUE = 34; /** * <pre> * Cannot set both destination url and final urls. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS = 35;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS_VALUE = 35; /** * <pre> * Cannot set both destination url and tracking url template. * </pre> * * <code>CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE = 36;</code> */ public static final int CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE_VALUE = 36; /** * <pre> * Final urls are not supported for this criterion type. * </pre> * * <code>FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 37;</code> */ public static final int FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 37; /** * <pre> * Final mobile urls are not supported for this criterion type. * </pre> * * <code>FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE = 38;</code> */ public static final int FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE_VALUE = 38; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AdGroupCriterionError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. */ public static AdGroupCriterionError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST; case 3: return AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS; case 4: return CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION; case 5: return TOO_MANY_OPERATIONS; case 6: return CANT_UPDATE_NEGATIVE; case 7: return CONCRETE_TYPE_REQUIRED; case 8: return BID_INCOMPATIBLE_WITH_ADGROUP; case 9: return CANNOT_TARGET_AND_EXCLUDE; case 10: return ILLEGAL_URL; case 11: return INVALID_KEYWORD_TEXT; case 12: return INVALID_DESTINATION_URL; case 13: return MISSING_DESTINATION_URL_TAG; case 14: return KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM; case 15: return INVALID_USER_STATUS; case 16: return CANNOT_ADD_CRITERIA_TYPE; case 17: return CANNOT_EXCLUDE_CRITERIA_TYPE; case 27: return CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE; case 28: return OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS; case 29: return CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS; case 30: return CANNOT_SET_WITHOUT_FINAL_URLS; case 31: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST; case 32: return CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST; case 33: return CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS; case 34: return CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST; case 35: return CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS; case 36: return CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE; case 37: return FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; case 38: return FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< AdGroupCriterionError> internalValueMap = new 
com.google.protobuf.Internal.EnumLiteMap<AdGroupCriterionError>() { public AdGroupCriterionError findValueByNumber(int number) { return AdGroupCriterionError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final AdGroupCriterionError[] VALUES = values(); public static AdGroupCriterionError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private AdGroupCriterionError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.AdGroupCriterionError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override 
public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum other = (com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) obj; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible ad group criterion errors. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_AdGroupCriterionErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.class, com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorProto.internal_static_google_ads_googleads_v21_errors_AdGroupCriterionErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum build() { com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum buildPartial() { com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum result = new com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) { return mergeFrom((com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum other) { if (other == com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum) private static final com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum(); } public 
static com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionErrorEnum> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionErrorEnum>() { @java.lang.Override public AdGroupCriterionErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionErrorEnum> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.errors.AdGroupCriterionErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-api-java-client-services
35,079
clients/google-api-services-reseller/v1/1.30.1/com/google/api/services/reseller/model/Subscription.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.reseller.model;

/**
 * JSON template for a subscription.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Enterprise Apps Reseller API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>NOTE(review): generated code — field names below are the JSON wire names (bound via
 * {@code @Key}); do not rename them by hand.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Subscription extends com.google.api.client.json.GenericJson {

  /**
   * Read-only field that returns the current billing method for a subscription.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String billingMethod;

  /**
   * The creationTime property is the date when subscription was created. It is in milliseconds
   * using the Epoch format. See an example Epoch converter.
   * The value may be {@code null}.
   */
  // @JsonString: serialized as a JSON string to avoid 64-bit precision loss in JavaScript clients.
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long creationTime;

  /**
   * Primary domain name of the customer
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String customerDomain;

  /**
   * This property will always be returned in a response as the unique identifier generated by
   * Google. In a request, this property can be either the primary domain or the unique identifier
   * generated by Google.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String customerId;

  /**
   * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must
   * be included in insert requests in order to receive discounted rate. This property is optional,
   * regular pricing applies if left empty.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String dealCode;

  /**
   * Identifies the resource as a Subscription. Value: reseller#subscription
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Plan plan;

  /**
   * This is an optional property. This purchase order (PO) information is for resellers to use for
   * their company tracking usage. If a purchaseOrderId value is given it appears in the API
   * responses and shows up in the invoice. The property accepts up to 80 plain text characters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String purchaseOrderId;

  /**
   * Renewal settings for the annual commitment plan. For more detailed information, see renewal
   * options in the administrator help center.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private RenewalSettings renewalSettings;

  /**
   * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by
   * the API service. This is used if your client application requires the customer to complete a
   * task using the Subscriptions page in the Admin console.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String resourceUiUrl;

  /**
   * This is a required property. The number and limit of user seat licenses in the plan.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Seats seats;

  /**
   * A required property. The skuId is a unique system identifier for a product's SKU assigned to a
   * customer in the subscription. For products and SKUs available in this version of the API, see
   * Product and SKU IDs.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String skuId;

  /**
   * Read-only external display name for a product's SKU assigned to a customer in the subscription.
   * SKU names are subject to change at Google's discretion. For products and SKUs available in this
   * version of the API, see Product and SKU IDs.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String skuName;

  /**
   * This is an optional property.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String status;

  /**
   * The subscriptionId is the subscription identifier and is unique for each customer. This is a
   * required property. Since a subscriptionId changes when a subscription is updated, we recommend
   * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve
   * all reseller subscriptions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String subscriptionId;

  /**
   * Read-only field containing an enumerable of all the current suspension reasons for a
   * subscription. It is possible for a subscription to have many concurrent, overlapping suspension
   * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed.
   * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and
   * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's
   * commitment ended and their service was cancelled at the end of their term. - RESELLER_INITIATED
   * - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The customer's trial expired
   * without a plan selected. - OTHER - The customer is suspended for an internal Google reason
   * (e.g. abuse or otherwise).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> suspensionReasons;

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TransferInfo transferInfo;

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For
   * more information, see the API concepts.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TrialSettings trialSettings;

  /**
   * Read-only field that returns the current billing method for a subscription.
   * @return value or {@code null} for none
   */
  public java.lang.String getBillingMethod() {
    return billingMethod;
  }

  /**
   * Read-only field that returns the current billing method for a subscription.
   * @param billingMethod billingMethod or {@code null} for none
   */
  public Subscription setBillingMethod(java.lang.String billingMethod) {
    this.billingMethod = billingMethod;
    return this;
  }

  /**
   * The creationTime property is the date when subscription was created. It is in milliseconds
   * using the Epoch format. See an example Epoch converter.
   * @return value or {@code null} for none
   */
  public java.lang.Long getCreationTime() {
    return creationTime;
  }

  /**
   * The creationTime property is the date when subscription was created. It is in milliseconds
   * using the Epoch format. See an example Epoch converter.
   * @param creationTime creationTime or {@code null} for none
   */
  public Subscription setCreationTime(java.lang.Long creationTime) {
    this.creationTime = creationTime;
    return this;
  }

  /**
   * Primary domain name of the customer
   * @return value or {@code null} for none
   */
  public java.lang.String getCustomerDomain() {
    return customerDomain;
  }

  /**
   * Primary domain name of the customer
   * @param customerDomain customerDomain or {@code null} for none
   */
  public Subscription setCustomerDomain(java.lang.String customerDomain) {
    this.customerDomain = customerDomain;
    return this;
  }

  /**
   * This property will always be returned in a response as the unique identifier generated by
   * Google. In a request, this property can be either the primary domain or the unique identifier
   * generated by Google.
   * @return value or {@code null} for none
   */
  public java.lang.String getCustomerId() {
    return customerId;
  }

  /**
   * This property will always be returned in a response as the unique identifier generated by
   * Google. In a request, this property can be either the primary domain or the unique identifier
   * generated by Google.
   * @param customerId customerId or {@code null} for none
   */
  public Subscription setCustomerId(java.lang.String customerId) {
    this.customerId = customerId;
    return this;
  }

  /**
   * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must
   * be included in insert requests in order to receive discounted rate. This property is optional,
   * regular pricing applies if left empty.
   * @return value or {@code null} for none
   */
  public java.lang.String getDealCode() {
    return dealCode;
  }

  /**
   * Google-issued code (100 char max) for discounted pricing on subscription plans. Deal code must
   * be included in insert requests in order to receive discounted rate. This property is optional,
   * regular pricing applies if left empty.
   * @param dealCode dealCode or {@code null} for none
   */
  public Subscription setDealCode(java.lang.String dealCode) {
    this.dealCode = dealCode;
    return this;
  }

  /**
   * Identifies the resource as a Subscription. Value: reseller#subscription
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * Identifies the resource as a Subscription. Value: reseller#subscription
   * @param kind kind or {@code null} for none
   */
  public Subscription setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   * @return value or {@code null} for none
   */
  public Plan getPlan() {
    return plan;
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   * @param plan plan or {@code null} for none
   */
  public Subscription setPlan(Plan plan) {
    this.plan = plan;
    return this;
  }

  /**
   * This is an optional property. This purchase order (PO) information is for resellers to use for
   * their company tracking usage. If a purchaseOrderId value is given it appears in the API
   * responses and shows up in the invoice. The property accepts up to 80 plain text characters.
   * @return value or {@code null} for none
   */
  public java.lang.String getPurchaseOrderId() {
    return purchaseOrderId;
  }

  /**
   * This is an optional property. This purchase order (PO) information is for resellers to use for
   * their company tracking usage. If a purchaseOrderId value is given it appears in the API
   * responses and shows up in the invoice. The property accepts up to 80 plain text characters.
   * @param purchaseOrderId purchaseOrderId or {@code null} for none
   */
  public Subscription setPurchaseOrderId(java.lang.String purchaseOrderId) {
    this.purchaseOrderId = purchaseOrderId;
    return this;
  }

  /**
   * Renewal settings for the annual commitment plan. For more detailed information, see renewal
   * options in the administrator help center.
   * @return value or {@code null} for none
   */
  public RenewalSettings getRenewalSettings() {
    return renewalSettings;
  }

  /**
   * Renewal settings for the annual commitment plan. For more detailed information, see renewal
   * options in the administrator help center.
   * @param renewalSettings renewalSettings or {@code null} for none
   */
  public Subscription setRenewalSettings(RenewalSettings renewalSettings) {
    this.renewalSettings = renewalSettings;
    return this;
  }

  /**
   * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by
   * the API service. This is used if your client application requires the customer to complete a
   * task using the Subscriptions page in the Admin console.
   * @return value or {@code null} for none
   */
  public java.lang.String getResourceUiUrl() {
    return resourceUiUrl;
  }

  /**
   * URL to customer's Subscriptions page in the Admin console. The read-only URL is generated by
   * the API service. This is used if your client application requires the customer to complete a
   * task using the Subscriptions page in the Admin console.
   * @param resourceUiUrl resourceUiUrl or {@code null} for none
   */
  public Subscription setResourceUiUrl(java.lang.String resourceUiUrl) {
    this.resourceUiUrl = resourceUiUrl;
    return this;
  }

  /**
   * This is a required property. The number and limit of user seat licenses in the plan.
   * @return value or {@code null} for none
   */
  public Seats getSeats() {
    return seats;
  }

  /**
   * This is a required property. The number and limit of user seat licenses in the plan.
   * @param seats seats or {@code null} for none
   */
  public Subscription setSeats(Seats seats) {
    this.seats = seats;
    return this;
  }

  /**
   * A required property. The skuId is a unique system identifier for a product's SKU assigned to a
   * customer in the subscription. For products and SKUs available in this version of the API, see
   * Product and SKU IDs.
   * @return value or {@code null} for none
   */
  public java.lang.String getSkuId() {
    return skuId;
  }

  /**
   * A required property. The skuId is a unique system identifier for a product's SKU assigned to a
   * customer in the subscription. For products and SKUs available in this version of the API, see
   * Product and SKU IDs.
   * @param skuId skuId or {@code null} for none
   */
  public Subscription setSkuId(java.lang.String skuId) {
    this.skuId = skuId;
    return this;
  }

  /**
   * Read-only external display name for a product's SKU assigned to a customer in the subscription.
   * SKU names are subject to change at Google's discretion. For products and SKUs available in this
   * version of the API, see Product and SKU IDs.
   * @return value or {@code null} for none
   */
  public java.lang.String getSkuName() {
    return skuName;
  }

  /**
   * Read-only external display name for a product's SKU assigned to a customer in the subscription.
   * SKU names are subject to change at Google's discretion. For products and SKUs available in this
   * version of the API, see Product and SKU IDs.
   * @param skuName skuName or {@code null} for none
   */
  public Subscription setSkuName(java.lang.String skuName) {
    this.skuName = skuName;
    return this;
  }

  /**
   * This is an optional property.
   * @return value or {@code null} for none
   */
  public java.lang.String getStatus() {
    return status;
  }

  /**
   * This is an optional property.
   * @param status status or {@code null} for none
   */
  public Subscription setStatus(java.lang.String status) {
    this.status = status;
    return this;
  }

  /**
   * The subscriptionId is the subscription identifier and is unique for each customer. This is a
   * required property. Since a subscriptionId changes when a subscription is updated, we recommend
   * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve
   * all reseller subscriptions.
   * @return value or {@code null} for none
   */
  public java.lang.String getSubscriptionId() {
    return subscriptionId;
  }

  /**
   * The subscriptionId is the subscription identifier and is unique for each customer. This is a
   * required property. Since a subscriptionId changes when a subscription is updated, we recommend
   * not using this ID as a key for persistent data. Use the subscriptionId as described in retrieve
   * all reseller subscriptions.
   * @param subscriptionId subscriptionId or {@code null} for none
   */
  public Subscription setSubscriptionId(java.lang.String subscriptionId) {
    this.subscriptionId = subscriptionId;
    return this;
  }

  /**
   * Read-only field containing an enumerable of all the current suspension reasons for a
   * subscription. It is possible for a subscription to have many concurrent, overlapping suspension
   * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed.
   * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and
   * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's
   * commitment ended and their service was cancelled at the end of their term. - RESELLER_INITIATED
   * - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The customer's trial expired
   * without a plan selected. - OTHER - The customer is suspended for an internal Google reason
   * (e.g. abuse or otherwise).
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getSuspensionReasons() {
    return suspensionReasons;
  }

  /**
   * Read-only field containing an enumerable of all the current suspension reasons for a
   * subscription. It is possible for a subscription to have many concurrent, overlapping suspension
   * reasons. A subscription's STATUS is SUSPENDED until all pending suspensions are removed.
   * Possible options include: - PENDING_TOS_ACCEPTANCE - The customer has not logged in and
   * accepted the G Suite Resold Terms of Services. - RENEWAL_WITH_TYPE_CANCEL - The customer's
   * commitment ended and their service was cancelled at the end of their term. - RESELLER_INITIATED
   * - A manual suspension invoked by a Reseller. - TRIAL_ENDED - The customer's trial expired
   * without a plan selected. - OTHER - The customer is suspended for an internal Google reason
   * (e.g. abuse or otherwise).
   * @param suspensionReasons suspensionReasons or {@code null} for none
   */
  public Subscription setSuspensionReasons(java.util.List<java.lang.String> suspensionReasons) {
    this.suspensionReasons = suspensionReasons;
    return this;
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   * @return value or {@code null} for none
   */
  public TransferInfo getTransferInfo() {
    return transferInfo;
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   * @param transferInfo transferInfo or {@code null} for none
   */
  public Subscription setTransferInfo(TransferInfo transferInfo) {
    this.transferInfo = transferInfo;
    return this;
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For
   * more information, see the API concepts.
   * @return value or {@code null} for none
   */
  public TrialSettings getTrialSettings() {
    return trialSettings;
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For
   * more information, see the API concepts.
   * @param trialSettings trialSettings or {@code null} for none
   */
  public Subscription setTrialSettings(TrialSettings trialSettings) {
    this.trialSettings = trialSettings;
    return this;
  }

  @Override
  public Subscription set(String fieldName, Object value) {
    return (Subscription) super.set(fieldName, value);
  }

  @Override
  public Subscription clone() {
    return (Subscription) super.clone();
  }

  /**
   * The plan property is required. In this version of the API, the G Suite plans are the flexible
   * plan, annual commitment plan, and the 30-day free trial plan. For more information about the
   * API's payment plans, see the API concepts.
   */
  public static final class Plan extends com.google.api.client.json.GenericJson {

    /**
     * In this version of the API, annual commitment plan's interval is one year. *Note: *When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private CommitmentInterval commitmentInterval;

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean isCommitmentPlan;

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts. Possible values are: -
     * ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. *Caution: *
     * *ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — The
     * annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — The 30-day
     * free trial plan. A subscription in trial will be suspended after the 30th free day if no
     * payment plan is assigned. Calling changePlan will assign a payment plan to a trial but will not
     * activate the plan. A trial will automatically begin its assigned payment plan after its 30th
     * free day or immediately after calling startPaidService. - FREE — The free plan is exclusive to
     * the Cloud Identity SKU and does not incur any billing.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String planName;

    /**
     * In this version of the API, annual commitment plan's interval is one year. *Note: *When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     * @return value or {@code null} for none
     */
    public CommitmentInterval getCommitmentInterval() {
      return commitmentInterval;
    }

    /**
     * In this version of the API, annual commitment plan's interval is one year. *Note: *When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     * @param commitmentInterval commitmentInterval or {@code null} for none
     */
    public Plan setCommitmentInterval(CommitmentInterval commitmentInterval) {
      this.commitmentInterval = commitmentInterval;
      return this;
    }

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getIsCommitmentPlan() {
      return isCommitmentPlan;
    }

    /**
     * The isCommitmentPlan property's boolean value identifies the plan as an annual commitment plan:
     * - true — The subscription's plan is an annual commitment plan. - false — The plan is not an
     * annual commitment plan.
     * @param isCommitmentPlan isCommitmentPlan or {@code null} for none
     */
    public Plan setIsCommitmentPlan(java.lang.Boolean isCommitmentPlan) {
      this.isCommitmentPlan = isCommitmentPlan;
      return this;
    }

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts. Possible values are: -
     * ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. *Caution: *
     * *ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — The
     * annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — The 30-day
     * free trial plan. A subscription in trial will be suspended after the 30th free day if no
     * payment plan is assigned. Calling changePlan will assign a payment plan to a trial but will not
     * activate the plan. A trial will automatically begin its assigned payment plan after its 30th
     * free day or immediately after calling startPaidService. - FREE — The free plan is exclusive to
     * the Cloud Identity SKU and does not incur any billing.
     * @return value or {@code null} for none
     */
    public java.lang.String getPlanName() {
      return planName;
    }

    /**
     * The planName property is required. This is the name of the subscription's plan. For more
     * information about the Google payment plans, see the API concepts. Possible values are: -
     * ANNUAL_MONTHLY_PAY — The annual commitment plan with monthly payments. *Caution: *
     * *ANNUAL_MONTHLY_PAY is returned as ANNUAL in all API responses. - ANNUAL_YEARLY_PAY — The
     * annual commitment plan with yearly payments - FLEXIBLE — The flexible plan - TRIAL — The 30-day
     * free trial plan. A subscription in trial will be suspended after the 30th free day if no
     * payment plan is assigned. Calling changePlan will assign a payment plan to a trial but will not
     * activate the plan. A trial will automatically begin its assigned payment plan after its 30th
     * free day or immediately after calling startPaidService. - FREE — The free plan is exclusive to
     * the Cloud Identity SKU and does not incur any billing.
     * @param planName planName or {@code null} for none
     */
    public Plan setPlanName(java.lang.String planName) {
      this.planName = planName;
      return this;
    }

    @Override
    public Plan set(String fieldName, Object value) {
      return (Plan) super.set(fieldName, value);
    }

    @Override
    public Plan clone() {
      return (Plan) super.clone();
    }

    /**
     * In this version of the API, annual commitment plan's interval is one year. *Note: *When
     * billingMethod value is OFFLINE, the subscription property object plan.commitmentInterval is
     * omitted in all API responses.
     */
    public static final class CommitmentInterval extends com.google.api.client.json.GenericJson {

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See
       * an example Epoch converter.
       * The value may be {@code null}.
       */
      // @JsonString: serialized as a JSON string to avoid 64-bit precision loss in JavaScript clients.
      @com.google.api.client.util.Key
      @com.google.api.client.json.JsonString
      private java.lang.Long endTime;

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See
       * an example Epoch converter.
       * The value may be {@code null}.
       */
      // @JsonString: serialized as a JSON string to avoid 64-bit precision loss in JavaScript clients.
      @com.google.api.client.util.Key
      @com.google.api.client.json.JsonString
      private java.lang.Long startTime;

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See
       * an example Epoch converter.
       * @return value or {@code null} for none
       */
      public java.lang.Long getEndTime() {
        return endTime;
      }

      /**
       * An annual commitment plan's interval's endTime in milliseconds using the UNIX Epoch format. See
       * an example Epoch converter.
       * @param endTime endTime or {@code null} for none
       */
      public CommitmentInterval setEndTime(java.lang.Long endTime) {
        this.endTime = endTime;
        return this;
      }

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See
       * an example Epoch converter.
       * @return value or {@code null} for none
       */
      public java.lang.Long getStartTime() {
        return startTime;
      }

      /**
       * An annual commitment plan's interval's startTime in milliseconds using UNIX Epoch format. See
       * an example Epoch converter.
       * @param startTime startTime or {@code null} for none
       */
      public CommitmentInterval setStartTime(java.lang.Long startTime) {
        this.startTime = startTime;
        return this;
      }

      @Override
      public CommitmentInterval set(String fieldName, Object value) {
        return (CommitmentInterval) super.set(fieldName, value);
      }

      @Override
      public CommitmentInterval clone() {
        return (CommitmentInterval) super.clone();
      }
    }
  }

  /**
   * Read-only transfer related information for the subscription. For more information, see retrieve
   * transferable subscriptions for a customer.
   */
  public static final class TransferInfo extends com.google.api.client.json.GenericJson {

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Integer minimumTransferableSeats;

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     * The value may be {@code null}.
     */
    // @JsonString: serialized as a JSON string to avoid 64-bit precision loss in JavaScript clients.
    @com.google.api.client.util.Key
    @com.google.api.client.json.JsonString
    private java.lang.Long transferabilityExpirationTime;

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     * @return value or {@code null} for none
     */
    public java.lang.Integer getMinimumTransferableSeats() {
      return minimumTransferableSeats;
    }

    /**
     * When inserting a subscription, this is the minimum number of seats listed in the transfer order
     * for this product. For example, if the customer has 20 users, the reseller cannot place a
     * transfer order of 15 seats. The minimum is 20 seats.
     * @param minimumTransferableSeats minimumTransferableSeats or {@code null} for none
     */
    public TransferInfo setMinimumTransferableSeats(java.lang.Integer minimumTransferableSeats) {
      this.minimumTransferableSeats = minimumTransferableSeats;
      return this;
    }

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     * @return value or {@code null} for none
     */
    public java.lang.Long getTransferabilityExpirationTime() {
      return transferabilityExpirationTime;
    }

    /**
     * The time when transfer token or intent to transfer will expire. The time is in milliseconds
     * using UNIX Epoch format.
     * @param transferabilityExpirationTime transferabilityExpirationTime or {@code null} for none
     */
    public TransferInfo setTransferabilityExpirationTime(java.lang.Long transferabilityExpirationTime) {
      this.transferabilityExpirationTime = transferabilityExpirationTime;
      return this;
    }

    @Override
    public TransferInfo set(String fieldName, Object value) {
      return (TransferInfo) super.set(fieldName, value);
    }

    @Override
    public TransferInfo clone() {
      return (TransferInfo) super.clone();
    }
  }

  /**
   * The G Suite annual commitment and flexible payment plans can be in a 30-day free trial. For more
   * information, see the API concepts.
   */
  public static final class TrialSettings extends com.google.api.client.json.GenericJson {

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean isInTrial;

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     * The value may be {@code null}.
     */
    // @JsonString: serialized as a JSON string to avoid 64-bit precision loss in JavaScript clients.
    @com.google.api.client.util.Key
    @com.google.api.client.json.JsonString
    private java.lang.Long trialEndTime;

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getIsInTrial() {
      return isInTrial;
    }

    /**
     * Determines if a subscription's plan is in a 30-day free trial or not: - true — The plan is in
     * trial. - false — The plan is not in trial.
     * @param isInTrial isInTrial or {@code null} for none
     */
    public TrialSettings setIsInTrial(java.lang.Boolean isInTrial) {
      this.isInTrial = isInTrial;
      return this;
    }

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     * @return value or {@code null} for none
     */
    public java.lang.Long getTrialEndTime() {
      return trialEndTime;
    }

    /**
     * Date when the trial ends. The value is in milliseconds using the UNIX Epoch format. See an
     * example Epoch converter.
     * @param trialEndTime trialEndTime or {@code null} for none
     */
    public TrialSettings setTrialEndTime(java.lang.Long trialEndTime) {
      this.trialEndTime = trialEndTime;
      return this;
    }

    @Override
    public TrialSettings set(String fieldName, Object value) {
      return (TrialSettings) super.set(fieldName, value);
    }

    @Override
    public TrialSettings clone() {
      return (TrialSettings) super.clone();
    }
  }
}
apache/iotdb
35,320
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/planner/plan/node/PlanVisitor.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.db.queryengine.plan.planner.plan.node; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.CountSchemaMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.DeviceSchemaFetchScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.DevicesCountNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.DevicesSchemaScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.LevelTimeSeriesCountNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.NodeManagementMemoryMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.NodePathsConvertNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.NodePathsCountNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.NodePathsSchemaScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.SchemaFetchMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.SchemaQueryMergeNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.SchemaQueryOrderByHeatNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.SchemaQueryScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.SeriesSchemaFetchScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.TimeSeriesCountNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.read.TimeSeriesSchemaScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.ActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.AlterTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.BatchActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.ConstructSchemaBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateAlignedTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateMultiTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.CreateTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.DeactivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.DeleteTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalBatchActivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalCreateMultiTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.InternalCreateTimeSeriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.PreDeactivateTemplateNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.RollbackPreDeactivateTemplateNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.RollbackSchemaBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.AlterLogicalViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.ConstructLogicalViewBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.CreateLogicalViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.DeleteLogicalViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.metadata.write.view.RollbackLogicalViewBlackListNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedDeleteDataNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedInsertNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedNonWritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeEnrichedWritePlanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.pipe.PipeOperateSchemaQueueNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.AI.InferenceNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.ActiveRegionScanMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.AggregationMergeSortNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.AggregationNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.ColumnInjectNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.DeviceMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.DeviceViewIntoNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.DeviceViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.ExchangeNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.FillNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.FilterNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.GroupByLevelNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.GroupByTagNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.HorizontallyConcatNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.IntoNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.LimitNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.MergeSortNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.MultiChildProcessNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.OffsetNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.ProjectNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.RawDataAggregationNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.SingleChildProcessNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.SingleDeviceViewNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.SlidingWindowAggregationNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.SortNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.TopKNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.TransformNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.TwoChildProcessNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.join.FullOuterTimeJoinNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.join.InnerTimeJoinNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.join.LeftOuterTimeJoinNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.last.LastQueryCollectNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.last.LastQueryMergeNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.last.LastQueryNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.process.last.LastQueryTransformNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.sink.IdentitySinkNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.sink.ShuffleSinkNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.AlignedSeriesAggregationScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.AlignedSeriesScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.DeviceRegionScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.LastQueryScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.RegionScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SeriesAggregationScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SeriesAggregationSourceNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SeriesScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SeriesScanSourceNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.ShowQueriesNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.SourceNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.source.TimeseriesRegionScanNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.DeleteDataNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertMultiTabletsNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertRowNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertRowsNode; import 
org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertRowsOfOneDeviceNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertTabletNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalDeleteDataNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalInsertRowNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalInsertRowsNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalInsertTabletNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.iterative.GroupReference; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTreeDeviceViewScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.DeviceTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.GapFillNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.GroupNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.InformationSchemaTableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.LinearFillNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.MarkDistinctNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.PatternRecognitionNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.PreviousFillNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.SemiJoinNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableFunctionNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableFunctionProcessorNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TableScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TreeAlignedDeviceViewScanNode; import org.apache.iotdb.db.queryengine.plan.relational.planner.node.TreeDeviceViewScanNode; import 
org.apache.iotdb.db.queryengine.plan.relational.planner.node.TreeNonAlignedDeviceViewScanNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.UnionNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.ValueFillNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.WindowNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.ConstructTableDevicesBlackListNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.CreateOrUpdateTableDeviceNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.DeleteTableDeviceNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.DeleteTableDevicesInBlackListNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.RollbackTableDevicesBlackListNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableAttributeColumnDropNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceAttributeCommitUpdateNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceAttributeUpdateNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceFetchNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceQueryCountNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableDeviceQueryScanNode;
import org.apache.iotdb.db.queryengine.plan.relational.planner.node.schema.TableNodeLocationAddNode;

/**
 * Base class for visitors over the plan-node hierarchy (classic Visitor pattern with double
 * dispatch via {@code PlanNode.accept}).
 *
 * <p>Every concrete node type gets its own {@code visitXxx} overload. Each overload delegates by
 * default to the handler of the node's nearest visible ancestor category (e.g. a concrete scan
 * falls back to its source-node handler, which falls back to {@link #visitPlan}), so a subclass
 * can override at whatever level of generality it needs and let everything else fall through.
 *
 * <p>Only {@link #visitPlan} is abstract: it is the universal catch-all that every delegation
 * chain eventually reaches when no more specific handler is overridden.
 *
 * @param <R> result type produced by each visit method
 * @param <C> caller-supplied context threaded through the traversal
 */
@SuppressWarnings("java:S6539") // suppress "Monster class" warning: one method per node type is by design
public abstract class PlanVisitor<R, C> {

  /** Double-dispatch entry point: asks {@code node} to call back the matching visitXxx overload. */
  public R process(PlanNode node, C context) { return node.accept(this, context); }

  /** Catch-all handler; every default delegation chain below terminates here. */
  public abstract R visitPlan(PlanNode node, C context);

  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Data Query Node
  /////////////////////////////////////////////////////////////////////////////////////////////////

  // source --------------------------------------------------------------------------------------
  // Leaf nodes that read data; concrete scans delegate upward to their scan-family handler.
  public R visitSourceNode(SourceNode node, C context) { return visitPlan(node, context); }
  public R visitSeriesScanSource(SeriesScanSourceNode node, C context) { return visitSourceNode(node, context); }
  public R visitSeriesScan(SeriesScanNode node, C context) { return visitSeriesScanSource(node, context); }
  public R visitAlignedSeriesScan(AlignedSeriesScanNode node, C context) { return visitSeriesScanSource(node, context); }
  public R visitSeriesAggregationSourceNode(SeriesAggregationSourceNode node, C context) { return visitSourceNode(node, context); }
  public R visitSeriesAggregationScan(SeriesAggregationScanNode node, C context) { return visitSeriesAggregationSourceNode(node, context); }
  public R visitAlignedSeriesAggregationScan(AlignedSeriesAggregationScanNode node, C context) { return visitSeriesAggregationSourceNode(node, context); }
  public R visitLastQueryScan(LastQueryScanNode node, C context) { return visitSourceNode(node, context); }
  public R visitRegionScan(RegionScanNode node, C context) { return visitSourceNode(node, context); }
  public R visitDeviceRegionScan(DeviceRegionScanNode node, C context) { return visitRegionScan(node, context); }
  public R visitTimeSeriesRegionScan(TimeseriesRegionScanNode node, C context) { return visitRegionScan(node, context); }

  // single child --------------------------------------------------------------------------------
  // Operators with exactly one upstream child; all default to visitSingleChildProcess.
  public R visitSingleChildProcess(SingleChildProcessNode node, C context) { return visitPlan(node, context); }
  public R visitFill(FillNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitFilter(FilterNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitSlidingWindowAggregation(SlidingWindowAggregationNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitLimit(LimitNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitOffset(OffsetNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitSort(SortNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitProject(ProjectNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitExchange(ExchangeNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitTransform(TransformNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitInto(IntoNode node, C context) { return visitSingleChildProcess(node, context); }
  // Overload for the table-model (relational) IntoNode; fully qualified to avoid an import clash.
  public R visitInto(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.IntoNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitDeviceViewInto(DeviceViewIntoNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitColumnInject(ColumnInjectNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitSingleDeviceView(SingleDeviceViewNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitInference(InferenceNode node, C context) { return visitSingleChildProcess(node, context); }
  // NOTE(review): ExplainAnalyzeNode carries no explicit import above — presumably it lives in
  // this package; confirm before moving this class.
  public R visitExplainAnalyze(ExplainAnalyzeNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitRawDataAggregation(RawDataAggregationNode node, C context) { return visitSingleChildProcess(node, context); }

  // two child -----------------------------------------------------------------------------------
  // Binary operators (joins etc.); default to visitTwoChildProcess.
  public R visitTwoChildProcess(TwoChildProcessNode node, C context) { return visitPlan(node, context); }
  public R visitLeftOuterTimeJoin(LeftOuterTimeJoinNode node, C context) { return visitTwoChildProcess(node, context); }

  // multi child --------------------------------------------------------------------------------
  // N-ary operators (merges, views, aggregations); default to visitMultiChildProcess.
  public R visitMultiChildProcess(MultiChildProcessNode node, C context) { return visitPlan(node, context); }
  public R visitDeviceView(DeviceViewNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitAggregationMergeSort(AggregationMergeSortNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitDeviceMerge(DeviceMergeNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitGroupByLevel(GroupByLevelNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitGroupByTag(GroupByTagNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitAggregation(AggregationNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitFullOuterTimeJoin(FullOuterTimeJoinNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitInnerTimeJoin(InnerTimeJoinNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitLastQuery(LastQueryNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitLastQueryMerge(LastQueryMergeNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitLastQueryCollect(LastQueryCollectNode node, C context) { return visitMultiChildProcess(node, context); }
  // Unlike its last-query siblings this node has a single child, hence the single-child default.
  public R visitLastQueryTransform(LastQueryTransformNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitMergeSort(MergeSortNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitTopK(TopKNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitHorizontallyConcat(HorizontallyConcatNode node, C context) { return visitMultiChildProcess(node, context); }
  public R visitRegionMerge(ActiveRegionScanMergeNode node, C context) { return visitMultiChildProcess(node, context); }

  // others -----------------------------------------------------------------------------------
  public R visitShowQueries(ShowQueriesNode node, C context) { return visitPlan(node, context); }
  public R visitIdentitySink(IdentitySinkNode node, C context) { return visitPlan(node, context); }
  public R visitShuffleSink(ShuffleSinkNode node, C context) { return visitPlan(node, context); }

  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Schema Write & Query Node
  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Schema (metadata) plan nodes all default straight to visitPlan — there is no intermediate
  // category for them in the delegation hierarchy.
  public R visitSchemaQueryMerge(SchemaQueryMergeNode node, C context) { return visitPlan(node, context); }
  public R visitSchemaQueryScan(SchemaQueryScanNode node, C context) { return visitPlan(node, context); }
  public R visitSchemaQueryOrderByHeat(SchemaQueryOrderByHeatNode node, C context) { return visitPlan(node, context); }
  public R visitTimeSeriesSchemaScan(TimeSeriesSchemaScanNode node, C context) { return visitPlan(node, context); }
  public R visitDevicesSchemaScan(DevicesSchemaScanNode node, C context) { return visitPlan(node, context); }
  public R visitDevicesCount(DevicesCountNode node, C context) { return visitPlan(node, context); }
  public R visitTimeSeriesCount(TimeSeriesCountNode node, C context) { return visitPlan(node, context); }
  public R visitLevelTimeSeriesCount(LevelTimeSeriesCountNode node, C context) { return visitPlan(node, context); }
  public R visitCountMerge(CountSchemaMergeNode node, C context) { return visitPlan(node, context); }
  public R visitCreateTimeSeries(CreateTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitSchemaFetchMerge(SchemaFetchMergeNode node, C context) { return visitPlan(node, context); }
  public R visitSeriesSchemaFetchScan(SeriesSchemaFetchScanNode node, C context) { return visitPlan(node, context); }
  public R visitDeviceSchemaFetchScan(DeviceSchemaFetchScanNode node, C context) { return visitPlan(node, context); }
  public R visitCreateAlignedTimeSeries(CreateAlignedTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitCreateMultiTimeSeries(CreateMultiTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitAlterTimeSeries(AlterTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitInternalCreateTimeSeries(InternalCreateTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitActivateTemplate(ActivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitPreDeactivateTemplate(PreDeactivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitRollbackPreDeactivateTemplate(RollbackPreDeactivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitDeactivateTemplate(DeactivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitInternalBatchActivateTemplate(InternalBatchActivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitInternalCreateMultiTimeSeries(InternalCreateMultiTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitNodePathsSchemaScan(NodePathsSchemaScanNode node, C context) { return visitPlan(node, context); }
  public R visitNodeManagementMemoryMerge(NodeManagementMemoryMergeNode node, C context) { return visitPlan(node, context); }
  public R visitNodePathConvert(NodePathsConvertNode node, C context) { return visitPlan(node, context); }
  public R visitNodePathsCount(NodePathsCountNode node, C context) { return visitPlan(node, context); }
  public R visitDeleteTimeseries(DeleteTimeSeriesNode node, C context) { return visitPlan(node, context); }
  public R visitConstructSchemaBlackList(ConstructSchemaBlackListNode node, C context) { return visitPlan(node, context); }
  public R visitRollbackSchemaBlackList(RollbackSchemaBlackListNode node, C context) { return visitPlan(node, context); }
  public R visitBatchActivateTemplate(BatchActivateTemplateNode node, C context) { return visitPlan(node, context); }
  public R visitCreateLogicalView(CreateLogicalViewNode node, C context) { return visitPlan(node, context); }
  public R visitConstructLogicalViewBlackList(ConstructLogicalViewBlackListNode node, C context) { return visitPlan(node, context); }
  public R visitRollbackLogicalViewBlackList(RollbackLogicalViewBlackListNode node, C context) { return visitPlan(node, context); }
  public R visitDeleteLogicalView(DeleteLogicalViewNode node, C context) { return visitPlan(node, context); }
  public R visitAlterLogicalView(AlterLogicalViewNode node, C context) { return visitPlan(node, context); }

  // Table-model device/schema maintenance nodes (note: these use final parameters).
  public R visitCreateOrUpdateTableDevice(
      final CreateOrUpdateTableDeviceNode node, final C context) {
    return visitPlan(node, context);
  }
  public R visitTableDeviceAttributeUpdate(
      final TableDeviceAttributeUpdateNode node, final C context) {
    return visitPlan(node, context);
  }
  public R visitTableDeviceFetch(final TableDeviceFetchNode node, final C context) { return visitPlan(node, context); }
  public R visitTableDeviceQueryScan(final TableDeviceQueryScanNode node, final C context) { return visitPlan(node, context); }
  public R visitTableDeviceQueryCount(final TableDeviceQueryCountNode node, final C context) { return visitPlan(node, context); }
  public R visitTableDeviceAttributeCommit(
      final TableDeviceAttributeCommitUpdateNode node, final C context) {
    return visitPlan(node, context);
  }
  public R visitTableNodeLocationAdd(final TableNodeLocationAddNode node, final C context) { return visitPlan(node, context); }
  public R visitDeleteTableDevice(final DeleteTableDeviceNode node, final C context) { return visitPlan(node, context); }
  public R visitTableAttributeColumnDrop(final TableAttributeColumnDropNode node, final C context) { return visitPlan(node, context); }
  public R visitConstructTableDevicesBlackList(
      final ConstructTableDevicesBlackListNode node, final C context) {
    return visitPlan(node, context);
  }
  public R visitRollbackTableDevicesBlackList(
      final RollbackTableDevicesBlackListNode node, final C context) {
    return visitPlan(node, context);
  }
  public R visitDeleteTableDevicesInBlackList(
      final DeleteTableDevicesInBlackListNode node, final C context) {
    return visitPlan(node, context);
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Data Write Node
  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Relational (table-model) insert nodes delegate to their tree-model counterparts so a visitor
  // overriding only visitInsertRow/visitInsertTablet/visitInsertRows sees both models.
  public R visitInsertRow(InsertRowNode node, C context) { return visitPlan(node, context); }
  public R visitRelationalInsertRow(RelationalInsertRowNode node, C context) { return visitInsertRow(node, context); }
  public R visitRelationalInsertRows(RelationalInsertRowsNode node, C context) { return visitInsertRows(node, context); }
  public R visitInsertTablet(InsertTabletNode node, C context) { return visitPlan(node, context); }
  public R visitRelationalInsertTablet(RelationalInsertTabletNode node, C context) { return visitInsertTablet(node, context); }
  public R visitInsertRows(InsertRowsNode node, C context) { return visitPlan(node, context); }
  public R visitInsertMultiTablets(InsertMultiTabletsNode node, C context) { return visitPlan(node, context); }
  public R visitInsertRowsOfOneDevice(InsertRowsOfOneDeviceNode node, C context) { return visitPlan(node, context); }
  public R visitDeleteData(DeleteDataNode node, C context) { return visitPlan(node, context); }
  // Overload for the relational delete; unlike the inserts it does NOT chain to the tree variant.
  public R visitDeleteData(RelationalDeleteDataNode node, C context) { return visitPlan(node, context); }

  /////////////////////////////////////////////////////////////////////////////////////////////////
  // Pipe Related Node
  /////////////////////////////////////////////////////////////////////////////////////////////////
  public R visitPipeEnrichedInsertNode(PipeEnrichedInsertNode node, C context) { return visitPlan(node, context); }
  public R visitPipeEnrichedDeleteDataNode(PipeEnrichedDeleteDataNode node, C context) { return visitPlan(node, context); }
  public R visitPipeEnrichedWritePlanNode(PipeEnrichedWritePlanNode node, C context) { return visitPlan(node, context); }
  public R visitPipeEnrichedNonWritePlanNode(PipeEnrichedNonWritePlanNode node, C context) { return visitPlan(node, context); }
  public R visitPipeOperateSchemaQueueNode(PipeOperateSchemaQueueNode node, C context) { return visitPlan(node, context); }

  // =============================== Used for Table Model ====================================
  // Table-model nodes whose simple names collide with tree-model ones are referenced by their
  // fully qualified names instead of being imported.
  public R visitFilter(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.FilterNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitApply(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.ApplyNode node, C context) {
    return visitTwoChildProcess(node, context);
  }
  public R visitAssignUniqueId(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.AssignUniqueId node,
      C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitEnforceSingleRow(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.EnforceSingleRowNode node,
      C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitCorrelatedJoin(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.CorrelatedJoinNode node,
      C context) {
    return visitTwoChildProcess(node, context);
  }
  public R visitTableScan(TableScanNode node, C context) { return visitPlan(node, context); }
  public R visitDeviceTableScan(DeviceTableScanNode node, C context) { return visitTableScan(node, context); }
  public R visitInformationSchemaTableScan(InformationSchemaTableScanNode node, C context) { return visitTableScan(node, context); }
  public R visitProject(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.ProjectNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitLimit(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.LimitNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitOffset(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.OffsetNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitMergeSort(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.MergeSortNode node,
      C context) {
    return visitMultiChildProcess(node, context);
  }
  public R visitExplainAnalyze(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExplainAnalyzeNode node,
      C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitOutput(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.OutputNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitCollect(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.CollectNode node, C context) {
    return visitMultiChildProcess(node, context);
  }
  public R visitGapFill(GapFillNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitFill(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.FillNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  // Concrete fill strategies funnel into the relational visitFill overload above.
  public R visitPreviousFill(PreviousFillNode node, C context) { return visitFill(node, context); }
  public R visitLinearFill(LinearFillNode node, C context) { return visitFill(node, context); }
  public R visitValueFill(ValueFillNode node, C context) { return visitFill(node, context); }
  public R visitSort(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.SortNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitStreamSort(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.StreamSortNode node,
      C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitGroup(GroupNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitTopK(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.TopKNode node, C context) {
    return visitMultiChildProcess(node, context);
  }
  public R visitJoin(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.JoinNode node, C context) {
    return visitTwoChildProcess(node, context);
  }
  public R visitSemiJoin(SemiJoinNode node, C context) { return visitTwoChildProcess(node, context); }
  // GroupReference is the optimizer's placeholder for a memo group, not a real operator.
  public R visitGroupReference(GroupReference node, C context) { return visitPlan(node, context); }
  public R visitAggregation(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationNode node,
      C context) {
    return visitSingleChildProcess(node, context);
  }
  public R visitTableExchange(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.ExchangeNode node, C context) {
    return visitSingleChildProcess(node, context);
  }
  // Table-scan specializations form their own delegation chain rooted at visitTableScan.
  public R visitAggregationTableScan(
      org.apache.iotdb.db.queryengine.plan.relational.planner.node.AggregationTableScanNode node,
      C context) {
    return visitDeviceTableScan(node, context);
  }
  public R visitTreeDeviceViewScan(TreeDeviceViewScanNode node, C context) { return visitDeviceTableScan(node, context); }
  public R visitAggregationTreeDeviceViewScan(AggregationTreeDeviceViewScanNode node, C context) { return visitAggregationTableScan(node, context); }
  public R visitTreeAlignedDeviceViewScan(TreeAlignedDeviceViewScanNode node, C context) { return visitTreeDeviceViewScan(node, context); }
  public R visitTreeNonAlignedDeviceViewScan(TreeNonAlignedDeviceViewScanNode node, C context) { return visitTreeDeviceViewScan(node, context); }
  public R visitMarkDistinct(MarkDistinctNode node, C context) { return visitSingleChildProcess(node, context); }
  public R visitWindowFunction(WindowNode node, C context) { return visitPlan(node, context); }
  public R visitTableFunction(TableFunctionNode node, C context) { return visitPlan(node, context); }
  public R visitTableFunctionProcessor(TableFunctionProcessorNode node, C context) { return visitPlan(node, context); }
  public R visitPatternRecognition(PatternRecognitionNode node, C context) { return visitPlan(node, context); }
  public R visitUnion(UnionNode node, C context) { return visitPlan(node, context); }
}
apache/lucene
35,118
lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.queryparser.classic; import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import java.io.StringReader; import java.text.DateFormat; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TimeZone; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.DateTools; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.charstream.CharStream; import org.apache.lucene.queryparser.charstream.FastCharStream; import org.apache.lucene.queryparser.classic.QueryParser.Operator; import org.apache.lucene.queryparser.flexible.standard.CommonQueryParserConfiguration; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.IndexSearcher.TooManyClauses; import org.apache.lucene.search.MatchAllDocsQuery; import 
org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.QueryBuilder; import org.apache.lucene.util.automaton.RegExp; /** * This class is overridden by QueryParser in QueryParser.jj and acts to separate the majority of * the Java code from the .jj grammar file. */ public abstract class QueryParserBase extends QueryBuilder implements CommonQueryParserConfiguration { static final int CONJ_NONE = 0; static final int CONJ_AND = 1; static final int CONJ_OR = 2; static final int MOD_NONE = 0; static final int MOD_NOT = 10; static final int MOD_REQ = 11; // make it possible to call setDefaultOperator() without accessing // the nested class: /** Alternative form of QueryParser.Operator.AND */ public static final Operator AND_OPERATOR = Operator.AND; /** Alternative form of QueryParser.Operator.OR */ public static final Operator OR_OPERATOR = Operator.OR; /** The actual operator that parser uses to combine query terms */ Operator operator = OR_OPERATOR; MultiTermQuery.RewriteMethod multiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE; boolean allowLeadingWildcard = false; protected String field; int phraseSlop = 0; float fuzzyMinSim = FuzzyQuery.defaultMaxEdits; int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength; Locale locale = Locale.getDefault(); TimeZone timeZone = TimeZone.getDefault(); // the default date resolution DateTools.Resolution dateResolution = null; // maps field names to date resolutions Map<String, DateTools.Resolution> fieldToDateResolution = null; boolean 
autoGeneratePhraseQueries; int determinizeWorkLimit = DEFAULT_DETERMINIZE_WORK_LIMIT; // So the generated QueryParser(CharStream) won't error out protected QueryParserBase() { super(null); } /** * Initializes a query parser. Called by the QueryParser constructor * * @param f the default field for query terms. * @param a used to find terms in the query text. */ public void init(String f, Analyzer a) { setAnalyzer(a); field = f; setAutoGeneratePhraseQueries(false); } // the generated parser will create these in QueryParser public abstract void ReInit(CharStream stream); public abstract Query TopLevelQuery(String field) throws ParseException; /** * Parses a query string, returning a {@link org.apache.lucene.search.Query}. * * @param query the query string to be parsed. * @throws ParseException if the parsing fails */ public Query parse(String query) throws ParseException { ReInit(new FastCharStream(new StringReader(query))); try { // TopLevelQuery is a Query followed by the end-of-input (EOF) Query res = TopLevelQuery(field); return res != null ? res : newBooleanQuery().build(); } catch (ParseException | TokenMgrError tme) { // rethrow to include the original query: ParseException e = new ParseException("Cannot parse '" + query + "': " + tme.getMessage()); e.initCause(tme); throw e; } catch (TooManyClauses tmc) { ParseException e = new ParseException("Cannot parse '" + query + "': too many boolean clauses"); e.initCause(tmc); throw e; } } /** * @return Returns the default field. */ public String getField() { return field; } /** * @see #setAutoGeneratePhraseQueries(boolean) */ public final boolean getAutoGeneratePhraseQueries() { return autoGeneratePhraseQueries; } /** * Set to true if phrase queries will be automatically generated when the analyzer returns more * than one term from whitespace delimited text. NOTE: this behavior may not be suitable for all * languages. * * <p>Set to false if phrase queries should only be generated when surrounded by double quotes. 
*/ public void setAutoGeneratePhraseQueries(boolean value) { this.autoGeneratePhraseQueries = value; } /** Get the minimal similarity for fuzzy queries. */ @Override public float getFuzzyMinSim() { return fuzzyMinSim; } /** Set the minimum similarity for fuzzy queries. Default is 2f. */ @Override public void setFuzzyMinSim(float fuzzyMinSim) { this.fuzzyMinSim = fuzzyMinSim; } /** * Get the prefix length for fuzzy queries. * * @return Returns the fuzzyPrefixLength. */ @Override public int getFuzzyPrefixLength() { return fuzzyPrefixLength; } /** * Set the prefix length for fuzzy queries. Default is 0. * * @param fuzzyPrefixLength The fuzzyPrefixLength to set. */ @Override public void setFuzzyPrefixLength(int fuzzyPrefixLength) { this.fuzzyPrefixLength = fuzzyPrefixLength; } /** * Sets the default slop for phrases. If zero, then exact phrase matches are required. Default * value is zero. */ @Override public void setPhraseSlop(int phraseSlop) { this.phraseSlop = phraseSlop; } /** Gets the default slop for phrases. */ @Override public int getPhraseSlop() { return phraseSlop; } /** * Set to <code>true</code> to allow leading wildcard characters. * * <p>When set, <code>*</code> or <code>?</code> are allowed as the first character of a * PrefixQuery and WildcardQuery. Note that this can produce very slow queries on big indexes. * * <p>Default: false. */ @Override public void setAllowLeadingWildcard(boolean allowLeadingWildcard) { this.allowLeadingWildcard = allowLeadingWildcard; } /** * @see #setAllowLeadingWildcard(boolean) */ @Override public boolean getAllowLeadingWildcard() { return allowLeadingWildcard; } /** * Sets the boolean operator of the QueryParser. 
In default mode (<code>OR_OPERATOR</code>) terms * without any modifiers are considered optional: for example <code>capital of Hungary</code> is * equal to <code>capital OR of OR Hungary</code>.<br> * In <code>AND_OPERATOR</code> mode terms are considered to be in conjunction: the * above-mentioned query is parsed as <code>capital AND of AND Hungary</code> */ public void setDefaultOperator(Operator op) { this.operator = op; } /** Gets implicit operator setting, which will be either AND_OPERATOR or OR_OPERATOR. */ public Operator getDefaultOperator() { return operator; } @Override public void setMultiTermRewriteMethod(MultiTermQuery.RewriteMethod method) { multiTermRewriteMethod = method; } /** * @see #setMultiTermRewriteMethod */ @Override public MultiTermQuery.RewriteMethod getMultiTermRewriteMethod() { return multiTermRewriteMethod; } /** Set locale used by date range parsing, lowercasing, and other locale-sensitive operations. */ @Override public void setLocale(Locale locale) { this.locale = locale; } /** Returns current locale, allowing access by subclasses. */ @Override public Locale getLocale() { return locale; } @Override public void setTimeZone(TimeZone timeZone) { this.timeZone = timeZone; } @Override public TimeZone getTimeZone() { return timeZone; } /** * Sets the default date resolution used by RangeQueries for fields for which no specific date * resolutions has been set. Field specific resolutions can be set with {@link * #setDateResolution(String, org.apache.lucene.document.DateTools.Resolution)}. * * @param dateResolution the default date resolution to set */ @Override public void setDateResolution(DateTools.Resolution dateResolution) { this.dateResolution = dateResolution; } /** * Sets the date resolution used by RangeQueries for a specific field. 
* * @param fieldName field for which the date resolution is to be set * @param dateResolution date resolution to set */ public void setDateResolution(String fieldName, DateTools.Resolution dateResolution) { if (fieldName == null) { throw new IllegalArgumentException("Field must not be null."); } if (fieldToDateResolution == null) { // lazily initialize HashMap fieldToDateResolution = new HashMap<>(); } fieldToDateResolution.put(fieldName, dateResolution); } /** * Returns the date resolution that is used by RangeQueries for the given field. Returns null, if * no default or field specific date resolution has been set for the given field. */ public DateTools.Resolution getDateResolution(String fieldName) { if (fieldName == null) { throw new IllegalArgumentException("Field must not be null."); } if (fieldToDateResolution == null) { // no field specific date resolutions set; return default date resolution instead return this.dateResolution; } DateTools.Resolution resolution = fieldToDateResolution.get(fieldName); if (resolution == null) { // no date resolutions set for the given field; return default date resolution instead resolution = this.dateResolution; } return resolution; } /** * @param determinizeWorkLimit the maximum effort that determinizing a regexp query can spend. If * the query requires more effort, a TooComplexToDeterminizeException is thrown. */ public void setDeterminizeWorkLimit(int determinizeWorkLimit) { this.determinizeWorkLimit = determinizeWorkLimit; } /** * @return the maximum effort that determinizing a regexp query can spend. If the query requires * more effort, a TooComplexToDeterminizeException is thrown. 
*/ public int getDeterminizeWorkLimit() { return determinizeWorkLimit; } protected void addClause(List<BooleanClause> clauses, int conj, int mods, Query q) { boolean required, prohibited; // If this term is introduced by AND, make the preceding term required, // unless it's already prohibited if (clauses.size() > 0 && conj == CONJ_AND) { BooleanClause c = clauses.get(clauses.size() - 1); if (!c.isProhibited()) clauses.set(clauses.size() - 1, new BooleanClause(c.query(), Occur.MUST)); } if (clauses.size() > 0 && operator == AND_OPERATOR && conj == CONJ_OR) { // If this term is introduced by OR, make the preceding term optional, // unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b) // notice if the input is a OR b, first term is parsed as required; without // this modification a OR b would be parsed as +a OR b BooleanClause c = clauses.get(clauses.size() - 1); if (!c.isProhibited()) clauses.set(clauses.size() - 1, new BooleanClause(c.query(), Occur.SHOULD)); } // We might have been passed a null query; the term might have been // filtered away by the analyzer. if (q == null) return; if (operator == OR_OPERATOR) { // We set REQUIRED if we're introduced by AND or +; PROHIBITED if // introduced by NOT or -; make sure not to set both. 
prohibited = (mods == MOD_NOT); required = (mods == MOD_REQ); if (conj == CONJ_AND && !prohibited) { required = true; } } else { // We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED // if not PROHIBITED and not introduced by OR prohibited = (mods == MOD_NOT); required = (!prohibited && conj != CONJ_OR); } if (required && !prohibited) clauses.add(newBooleanClause(q, BooleanClause.Occur.MUST)); else if (!required && !prohibited) clauses.add(newBooleanClause(q, BooleanClause.Occur.SHOULD)); else if (!required && prohibited) clauses.add(newBooleanClause(q, BooleanClause.Occur.MUST_NOT)); else throw new RuntimeException("Clause cannot be both required and prohibited"); } /** * Adds clauses generated from analysis over text containing whitespace. There are no operators, * so the query's clauses can either be MUST (if the default operator is AND) or SHOULD (default * OR). * * <p>If all of the clauses in the given Query are TermQuery-s, this method flattens the result by * adding the TermQuery-s individually to the output clause list; otherwise, the given Query is * added as a single clause including its nested clauses. */ protected void addMultiTermClauses(List<BooleanClause> clauses, Query q) { // We might have been passed a null query; the term might have been // filtered away by the analyzer. if (q == null) { return; } boolean allNestedTermQueries = false; if (q instanceof BooleanQuery) { allNestedTermQueries = true; for (BooleanClause clause : ((BooleanQuery) q).clauses()) { if (!(clause.query() instanceof TermQuery)) { allNestedTermQueries = false; break; } } } if (allNestedTermQueries) { clauses.addAll(((BooleanQuery) q).clauses()); } else { BooleanClause.Occur occur = operator == OR_OPERATOR ? 
BooleanClause.Occur.SHOULD : BooleanClause.Occur.MUST; if (q instanceof BooleanQuery) { for (BooleanClause clause : ((BooleanQuery) q).clauses()) { clauses.add(newBooleanClause(clause.query(), occur)); } } else { clauses.add(newBooleanClause(q, occur)); } } } /** * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException { return newFieldQuery(getAnalyzer(), field, queryText, quoted); } /** * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, boolean quoted) throws ParseException { BooleanClause.Occur occur = operator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD; return createFieldQuery( analyzer, occur, field, queryText, quoted || autoGeneratePhraseQueries, phraseSlop); } /** * Base implementation delegates to {@link #getFieldQuery(String,String,boolean)}. This method may * be overridden, for example, to return a SpanNearQuery instead of a PhraseQuery. 
* * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException { Query query = getFieldQuery(field, queryText, true); if (query instanceof PhraseQuery) { query = addSlopToPhrase((PhraseQuery) query, slop); } else if (query instanceof MultiPhraseQuery mpq) { if (slop != mpq.getSlop()) { query = new MultiPhraseQuery.Builder(mpq).setSlop(slop).build(); } } return query; } /** Rebuild a phrase query with a slop value */ private PhraseQuery addSlopToPhrase(PhraseQuery query, int slop) { PhraseQuery.Builder builder = new PhraseQuery.Builder(); builder.setSlop(slop); org.apache.lucene.index.Term[] terms = query.getTerms(); int[] positions = query.getPositions(); for (int i = 0; i < terms.length; ++i) { builder.add(terms[i], positions[i]); } return builder.build(); } protected Query getRangeQuery( String field, String part1, String part2, boolean startInclusive, boolean endInclusive) throws ParseException { DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, locale); df.setLenient(true); DateTools.Resolution resolution = getDateResolution(field); try { part1 = DateTools.dateToString(df.parse(part1), resolution); } catch ( @SuppressWarnings("unused") Exception e) { } try { Date d2 = df.parse(part2); if (endInclusive) { // The user can only specify the date, not the time, so make sure // the time is set to the latest possible time of that date to really // include all documents: Calendar cal = Calendar.getInstance(timeZone, locale); cal.setTime(d2); cal.set(Calendar.HOUR_OF_DAY, 23); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); d2 = cal.getTime(); } part2 = DateTools.dateToString(d2, resolution); } catch ( @SuppressWarnings("unused") Exception e) { } return newRangeQuery(field, part1, part2, startInclusive, endInclusive); } /** * Builds a new BooleanClause instance * * @param 
q sub query * @param occur how this clause should occur when matching documents * @return new BooleanClause instance */ protected BooleanClause newBooleanClause(Query q, BooleanClause.Occur occur) { return new BooleanClause(q, occur); } /** * Builds a new PrefixQuery instance * * @param prefix Prefix term * @return new PrefixQuery instance */ protected Query newPrefixQuery(Term prefix) { return new PrefixQuery(prefix, multiTermRewriteMethod); } /** * Builds a new RegexpQuery instance * * @param regexp Regexp term * @return new RegexpQuery instance */ protected Query newRegexpQuery(Term regexp) { return new RegexpQuery( regexp, RegExp.ALL, 0, RegexpQuery.DEFAULT_PROVIDER, determinizeWorkLimit, multiTermRewriteMethod); } /** * Builds a new FuzzyQuery instance * * @param term Term * @param minimumSimilarity minimum similarity * @param prefixLength prefix length * @return new FuzzyQuery Instance */ protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) { // FuzzyQuery doesn't yet allow constant score rewrite String text = term.text(); int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length())); return new FuzzyQuery(term, numEdits, prefixLength); } /** * Builds a new {@link TermRangeQuery} instance * * @param field Field * @param part1 min * @param part2 max * @param startInclusive true if the start of the range is inclusive * @param endInclusive true if the end of the range is inclusive * @return new {@link TermRangeQuery} instance */ protected Query newRangeQuery( String field, String part1, String part2, boolean startInclusive, boolean endInclusive) { final BytesRef start; final BytesRef end; if (part1 == null) { start = null; } else { start = getAnalyzer().normalize(field, part1); } if (part2 == null) { end = null; } else { end = getAnalyzer().normalize(field, part2); } return new TermRangeQuery( field, start, end, startInclusive, endInclusive, multiTermRewriteMethod); } /** * Builds a new 
MatchAllDocsQuery instance * * @return new MatchAllDocsQuery instance */ protected Query newMatchAllDocsQuery() { return new MatchAllDocsQuery(); } /** * Builds a new WildcardQuery instance * * @param t wildcard term * @return new WildcardQuery instance */ protected Query newWildcardQuery(Term t) { return new WildcardQuery(t, determinizeWorkLimit, multiTermRewriteMethod); } /** * Factory method for generating query, given a set of clauses. By default creates a boolean query * composed of clauses passed in. * * <p>Can be overridden by extending classes, to modify query being returned. * * @param clauses List that contains {@link org.apache.lucene.search.BooleanClause} instances to * join. * @return Resulting {@link org.apache.lucene.search.Query} object. * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException { if (clauses.isEmpty()) { return null; // all clause words were filtered away by the analyzer. } BooleanQuery.Builder query = newBooleanQuery(); for (final BooleanClause clause : clauses) { query.add(clause); } return query.build(); } /** * Factory method for generating a query. Called when parser parses an input term token that * contains one or more wildcard characters (? and *), but is not a prefix term token (one that * has just a single * character at the end) * * <p>Depending on settings, prefix term may be lower-cased automatically. It will not go through * the default Analyzer, however, since normal Analyzers are unlikely to work properly with * wildcard templates. * * <p>Can be overridden by extending classes, to provide custom handling for wildcard queries, * which may be necessary due to missing analyzer calls. * * @param field Name of the field query will use. * @param termStr Term token that contains one or more wild card characters (? 
or *), but is not * simple prefix term * @return Resulting {@link org.apache.lucene.search.Query} built for the term * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getWildcardQuery(String field, String termStr) throws ParseException { if ("*".equals(field)) { if ("*".equals(termStr)) return newMatchAllDocsQuery(); } if (!allowLeadingWildcard && (termStr.startsWith("*") || termStr.startsWith("?"))) throw new ParseException("'*' or '?' not allowed as first character in WildcardQuery"); Term t = new Term(field, analyzeWildcard(field, termStr)); return newWildcardQuery(t); } private static final Pattern WILDCARD_PATTERN = Pattern.compile("(\\\\.)|([?*]+)"); private BytesRef analyzeWildcard(String field, String termStr) { // best effort to not pass the wildcard characters and escaped characters through #normalize Matcher wildcardMatcher = WILDCARD_PATTERN.matcher(termStr); BytesRefBuilder sb = new BytesRefBuilder(); int last = 0; while (wildcardMatcher.find()) { if (wildcardMatcher.start() > 0) { String chunk = termStr.substring(last, wildcardMatcher.start()); BytesRef normalized = getAnalyzer().normalize(field, chunk); sb.append(normalized); } // append the matched group - without normalizing sb.append(new BytesRef(wildcardMatcher.group())); last = wildcardMatcher.end(); } if (last < termStr.length()) { String chunk = termStr.substring(last); BytesRef normalized = getAnalyzer().normalize(field, chunk); sb.append(normalized); } return sb.toBytesRef(); } /** * Factory method for generating a query. Called when parser parses an input term token that * contains a regular expression query. * * <p>Depending on settings, pattern term may be lower-cased automatically. It will not go through * the default Analyzer, however, since normal Analyzers are unlikely to work properly with * regular expression templates. 
* * <p>Can be overridden by extending classes, to provide custom handling for regular expression * queries, which may be necessary due to missing analyzer calls. * * @param field Name of the field query will use. * @param termStr Term token that contains a regular expression * @return Resulting {@link org.apache.lucene.search.Query} built for the term * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getRegexpQuery(String field, String termStr) throws ParseException { // We need to pass the whole string to #normalize, which will not work with // custom attribute factories for the binary term impl, and may not work // with some analyzers BytesRef term = getAnalyzer().normalize(field, termStr); Term t = new Term(field, term); return newRegexpQuery(t); } /** * Factory method for generating a query (similar to {@link #getWildcardQuery}). Called when * parser parses an input term token that uses prefix notation; that is, contains a single '*' * wildcard character as its last character. Since this is a special case of generic wildcard * term, and such a query can be optimized easily, this usually results in a different query * object. * * <p>Depending on settings, a prefix term may be lower-cased automatically. It will not go * through the default Analyzer, however, since normal Analyzers are unlikely to work properly * with wildcard templates. * * <p>Can be overridden by extending classes, to provide custom handling for wild card queries, * which may be necessary due to missing analyzer calls. * * @param field Name of the field query will use. * @param termStr Term token to use for building term for the query (<b>without</b> trailing '*' * character!) 
* @return Resulting {@link org.apache.lucene.search.Query} built for the term * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getPrefixQuery(String field, String termStr) throws ParseException { if (!allowLeadingWildcard && termStr.startsWith("*")) throw new ParseException("'*' not allowed as first character in PrefixQuery"); BytesRef term = getAnalyzer().normalize(field, termStr); Term t = new Term(field, term); return newPrefixQuery(t); } /** * Factory method for generating a query (similar to {@link #getWildcardQuery}). Called when * parser parses an input term token that has the fuzzy suffix (~) appended. * * @param field Name of the field query will use. * @param termStr Term token to use for building term for the query * @return Resulting {@link org.apache.lucene.search.Query} built for the term * @exception org.apache.lucene.queryparser.classic.ParseException throw in overridden method to * disallow */ protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException { BytesRef term = getAnalyzer().normalize(field, termStr); Term t = new Term(field, term); return newFuzzyQuery(t, minSimilarity, fuzzyPrefixLength); } // extracted from the .jj grammar Query handleBareTokenQuery( String qfield, Token term, Token fuzzySlop, boolean prefix, boolean wildcard, boolean fuzzy, boolean regexp) throws ParseException { Query q; String termImage = discardEscapeChar(term.image); if (wildcard) { q = getWildcardQuery(qfield, term.image); } else if (prefix) { q = getPrefixQuery( qfield, discardEscapeChar(term.image.substring(0, term.image.length() - 1))); } else if (regexp) { q = getRegexpQuery(qfield, term.image.substring(1, term.image.length() - 1)); } else if (fuzzy) { q = handleBareFuzzy(qfield, fuzzySlop, termImage); } else { q = getFieldQuery(qfield, termImage, false); } return q; } /** * Determines the similarity distance for the given fuzzy token and 
term string. * * <p>The default implementation uses the string image of the {@code fuzzyToken} in an attempt to * parse it to a primitive float value. Otherwise, the {@linkplain #getFuzzyMinSim() minimal * similarity} distance is returned. Subclasses can override this method to return a similarity * distance, say based on the {@code termStr}, if the {@code fuzzyToken} does not specify a * distance. * * @param fuzzyToken The Fuzzy token * @param termStr The Term string * @return The similarity distance */ protected float getFuzzyDistance(Token fuzzyToken, String termStr) { try { return Float.parseFloat(fuzzyToken.image.substring(1)); } catch ( @SuppressWarnings("unused") Exception ignored) { } return fuzzyMinSim; } Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException { float fms = getFuzzyDistance(fuzzySlop, termImage); if (fms < 0.0f) { throw new ParseException( "Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !"); } else if (fms >= 1.0f && fms != (int) fms) { throw new ParseException("Fractional edit distances are not allowed!"); } return getFuzzyQuery(qfield, termImage, fms); } // extracted from the .jj grammar Query handleQuotedTerm(String qfield, Token term, Token fuzzySlop) throws ParseException { int s = phraseSlop; // default if (fuzzySlop != null) { try { s = (int) Float.parseFloat(fuzzySlop.image.substring(1)); } catch ( @SuppressWarnings("unused") Exception ignored) { } } return getFieldQuery( qfield, discardEscapeChar(term.image.substring(1, term.image.length() - 1)), s); } // extracted from the .jj grammar Query handleBoost(Query q, Token boost) { if (boost != null) { float f = (float) 1.0; try { f = Float.parseFloat(boost.image); } catch ( @SuppressWarnings("unused") Exception ignored) { /* Should this be handled somehow? 
(defaults to "no boost", if * boost number is invalid) */ } // avoid boosting null queries, such as those caused by stop words if (q != null) { q = new BoostQuery(q, f); } } return q; } /** * Returns a String where the escape char has been removed, or kept only once if there was a * double escape. * * <p>Supports escaped Unicode characters, e.g. translates {@code \u005Cu0041} to {@code A}. */ String discardEscapeChar(String input) throws ParseException { // Create char array to hold unescaped char sequence char[] output = new char[input.length()]; // The length of the output can be less than the input // due to discarded escape chars. This variable holds // the actual length of the output int length = 0; // We remember whether the last processed character was // an escape character boolean lastCharWasEscapeChar = false; // The multiplier the current unicode digit must be multiplied with. // E.g. the first digit must be multiplied with 16^3, the second with 16^2... int codePointMultiplier = 0; // Used to calculate the codepoint of the escaped unicode character int codePoint = 0; for (int i = 0; i < input.length(); i++) { char curChar = input.charAt(i); if (codePointMultiplier > 0) { codePoint += hexToInt(curChar) * codePointMultiplier; codePointMultiplier >>>= 4; if (codePointMultiplier == 0) { output[length++] = (char) codePoint; codePoint = 0; } } else if (lastCharWasEscapeChar) { if (curChar == 'u') { // found an escaped unicode character codePointMultiplier = 16 * 16 * 16; } else { // this character was escaped output[length] = curChar; length++; } lastCharWasEscapeChar = false; } else { if (curChar == '\\') { lastCharWasEscapeChar = true; } else { output[length] = curChar; length++; } } } if (codePointMultiplier > 0) { throw new ParseException("Truncated Unicode escape sequence."); } if (lastCharWasEscapeChar) { throw new ParseException("Term can not end with escape character."); } return new String(output, 0, length); } /** Returns the numeric value of the 
hexadecimal character */ static int hexToInt(char c) throws ParseException { if ('0' <= c && c <= '9') { return c - '0'; } else if ('a' <= c && c <= 'f') { return c - 'a' + 10; } else if ('A' <= c && c <= 'F') { return c - 'A' + 10; } else { throw new ParseException("Non-hex character in Unicode escape sequence: " + c); } } /** * Returns a String where those characters that QueryParser expects to be escaped are escaped by a * preceding <code>\</code>. */ public static String escape(String s) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); // These characters are part of the query syntax and must be escaped if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' || c == ')' || c == ':' || c == '^' || c == '[' || c == ']' || c == '\"' || c == '{' || c == '}' || c == '~' || c == '*' || c == '?' || c == '|' || c == '&' || c == '/') { sb.append('\\'); } sb.append(c); } return sb.toString(); } }
openjdk/jdk8
35,345
jdk/src/share/classes/javax/management/MBeanServer.java
/* * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.management; // java import import java.util.Set; import java.io.ObjectInputStream; // RI import import javax.management.loading.ClassLoaderRepository; /** * <p>This is the interface for MBean manipulation on the agent * side. It contains the methods necessary for the creation, * registration, and deletion of MBeans as well as the access methods * for registered MBeans. This is the core component of the JMX * infrastructure.</p> * * <p>User code does not usually implement this interface. 
Instead, * an object that implements this interface is obtained with one of * the methods in the {@link javax.management.MBeanServerFactory} class.</p> * * <p>Every MBean which is added to the MBean server becomes * manageable: its attributes and operations become remotely * accessible through the connectors/adaptors connected to that MBean * server. A Java object cannot be registered in the MBean server * unless it is a JMX compliant MBean.</p> * * <p id="notif">When an MBean is registered or unregistered in the * MBean server a {@link javax.management.MBeanServerNotification * MBeanServerNotification} Notification is emitted. To register an * object as listener to MBeanServerNotifications you should call the * MBean server method {@link #addNotificationListener * addNotificationListener} with <CODE>ObjectName</CODE> the * <CODE>ObjectName</CODE> of the {@link * javax.management.MBeanServerDelegate MBeanServerDelegate}. This * <CODE>ObjectName</CODE> is: <BR> * <CODE>JMImplementation:type=MBeanServerDelegate</CODE>.</p> * * <p>An object obtained from the {@link * MBeanServerFactory#createMBeanServer(String) createMBeanServer} or * {@link MBeanServerFactory#newMBeanServer(String) newMBeanServer} * methods of the {@link MBeanServerFactory} class applies security * checks to its methods, as follows.</p> * * <p>First, if there is no security manager ({@link * System#getSecurityManager()} is null), then an implementation of * this interface is free not to make any checks.</p> * * <p>Assuming that there is a security manager, or that the * implementation chooses to make checks anyway, the checks are made * as detailed below. 
In what follows, and unless otherwise specified, * {@code className} is the * string returned by {@link MBeanInfo#getClassName()} for the target * MBean.</p> * * <p>If a security check fails, the method throws {@link * SecurityException}.</p> * * <p>For methods that can throw {@link InstanceNotFoundException}, * this exception is thrown for a non-existent MBean, regardless of * permissions. This is because a non-existent MBean has no * <code>className</code>.</p> * * <ul> * * <li><p>For the {@link #invoke invoke} method, the caller's * permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, operationName, name, "invoke")}.</p> * * <li><p>For the {@link #getAttribute getAttribute} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, attribute, name, "getAttribute")}.</p> * * <li><p>For the {@link #getAttributes getAttributes} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "getAttribute")}. 
* Additionally, for each attribute <em>a</em> in the {@link * AttributeList}, if the caller's permissions do not imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, <em>a</em>, name, "getAttribute")}, the * MBean server will behave as if that attribute had not been in the * supplied list.</p> * * <li><p>For the {@link #setAttribute setAttribute} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, attrName, name, "setAttribute")}, where * <code>attrName</code> is {@link Attribute#getName() * attribute.getName()}.</p> * * <li><p>For the {@link #setAttributes setAttributes} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "setAttribute")}. * Additionally, for each attribute <em>a</em> in the {@link * AttributeList}, if the caller's permissions do not imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, <em>a</em>, name, "setAttribute")}, the * MBean server will behave as if that attribute had not been in the * supplied list.</p> * * <li><p>For the <code>addNotificationListener</code> methods, * the caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, * "addNotificationListener")}.</p> * * <li><p>For the <code>removeNotificationListener</code> methods, * the caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, * "removeNotificationListener")}.</p> * * <li><p>For the {@link #getMBeanInfo getMBeanInfo} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, 
"getMBeanInfo")}.</p> * * <li><p>For the {@link #getObjectInstance getObjectInstance} method, * the caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "getObjectInstance")}.</p> * * <li><p>For the {@link #isInstanceOf isInstanceOf} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "isInstanceOf")}.</p> * * <li><p>For the {@link #queryMBeans queryMBeans} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(null, null, null, "queryMBeans")}. * Additionally, for each MBean <em>n</em> that matches <code>name</code>, * if the caller's permissions do not imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, <em>n</em>, "queryMBeans")}, the * MBean server will behave as if that MBean did not exist.</p> * * <p>Certain query elements perform operations on the MBean server. * If the caller does not have the required permissions for a given * MBean, that MBean will not be included in the result of the query. * The standard query elements that are affected are {@link * Query#attr(String)}, {@link Query#attr(String,String)}, and {@link * Query#classattr()}.</p> * * <li><p>For the {@link #queryNames queryNames} method, the checks * are the same as for <code>queryMBeans</code> except that * <code>"queryNames"</code> is used instead of * <code>"queryMBeans"</code> in the <code>MBeanPermission</code> * objects. 
Note that a <code>"queryMBeans"</code> permission implies * the corresponding <code>"queryNames"</code> permission.</p> * * <li><p>For the {@link #getDomains getDomains} method, the caller's * permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(null, null, null, "getDomains")}. Additionally, * for each domain <var>d</var> in the returned array, if the caller's * permissions do not imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(null, null, new ObjectName("<var>d</var>:x=x"), * "getDomains")}, the domain is eliminated from the array. Here, * <code>x=x</code> is any <var>key=value</var> pair, needed to * satisfy ObjectName's constructor but not otherwise relevant.</p> * * <li><p>For the {@link #getClassLoader getClassLoader} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, loaderName, * "getClassLoader")}.</p> * * <li><p>For the {@link #getClassLoaderFor getClassLoaderFor} method, * the caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, mbeanName, * "getClassLoaderFor")}.</p> * * <li><p>For the {@link #getClassLoaderRepository * getClassLoaderRepository} method, the caller's permissions must * imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(null, null, null, "getClassLoaderRepository")}.</p> * * <li><p>For the deprecated <code>deserialize</code> methods, the * required permissions are the same as for the methods that replace * them.</p> * * <li><p>For the <code>instantiate</code> methods, the caller's * permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, null, "instantiate")}, * where {@code className} is the name of the class which is 
to * be instantiated.</p> * * <li><p>For the {@link #registerMBean registerMBean} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "registerMBean")}. * * <p>If the <code>MBeanPermission</code> check succeeds, the MBean's * class is validated by checking that its {@link * java.security.ProtectionDomain ProtectionDomain} implies {@link * MBeanTrustPermission#MBeanTrustPermission(String) * MBeanTrustPermission("register")}.</p> * * <p>Finally, if the <code>name</code> argument is null, another * <code>MBeanPermission</code> check is made using the * <code>ObjectName</code> returned by {@link * MBeanRegistration#preRegister MBeanRegistration.preRegister}.</p> * * <li><p>For the <code>createMBean</code> methods, the caller's * permissions must imply the permissions needed by the equivalent * <code>instantiate</code> followed by * <code>registerMBean</code>.</p> * * <li><p>For the {@link #unregisterMBean unregisterMBean} method, * the caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(className, null, name, "unregisterMBean")}.</p> * * </ul> * * @since 1.5 */ /* DELETED: * * <li><p>For the {@link #isRegistered isRegistered} method, the * caller's permissions must imply {@link * MBeanPermission#MBeanPermission(String,String,ObjectName,String) * MBeanPermission(null, null, name, "isRegistered")}.</p> */ public interface MBeanServer extends MBeanServerConnection { /** * {@inheritDoc} * <p>If this method successfully creates an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @throws RuntimeOperationsException {@inheritDoc} * @throws RuntimeMBeanException {@inheritDoc} * @throws RuntimeErrorException {@inheritDoc} */ public ObjectInstance createMBean(String className, ObjectName name) throws ReflectionException, InstanceAlreadyExistsException, 
MBeanRegistrationException, MBeanException, NotCompliantMBeanException; /** * {@inheritDoc} * <p>If this method successfully creates an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @throws RuntimeOperationsException {@inheritDoc} * @throws RuntimeMBeanException {@inheritDoc} * @throws RuntimeErrorException {@inheritDoc} */ public ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName) throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException, NotCompliantMBeanException, InstanceNotFoundException; /** * {@inheritDoc} * <p>If this method successfully creates an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @throws RuntimeOperationsException {@inheritDoc} * @throws RuntimeMBeanException {@inheritDoc} * @throws RuntimeErrorException {@inheritDoc} */ public ObjectInstance createMBean(String className, ObjectName name, Object params[], String signature[]) throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException, NotCompliantMBeanException; /** * {@inheritDoc} * <p>If this method successfully creates an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @throws RuntimeOperationsException {@inheritDoc} * @throws RuntimeMBeanException {@inheritDoc} * @throws RuntimeErrorException {@inheritDoc} */ public ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName, Object params[], String signature[]) throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException, NotCompliantMBeanException, InstanceNotFoundException; /** * <p>Registers a pre-existing object as an MBean with the MBean * server. 
If the object name given is null, the MBean must * provide its own name by implementing the {@link * javax.management.MBeanRegistration MBeanRegistration} interface * and returning the name from the {@link * MBeanRegistration#preRegister preRegister} method. * * <p>If this method successfully registers an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @param object The MBean to be registered as an MBean. * @param name The object name of the MBean. May be null. * * @return An <CODE>ObjectInstance</CODE>, containing the * <CODE>ObjectName</CODE> and the Java class name of the newly * registered MBean. If the contained <code>ObjectName</code> * is <code>n</code>, the contained Java class name is * <code>{@link #getMBeanInfo getMBeanInfo(n)}.getClassName()</code>. * * @exception InstanceAlreadyExistsException The MBean is already * under the control of the MBean server. * @exception MBeanRegistrationException The * <CODE>preRegister</CODE> (<CODE>MBeanRegistration</CODE> * interface) method of the MBean has thrown an exception. The * MBean will not be registered. * @exception RuntimeMBeanException If the <CODE>postRegister</CODE> * (<CODE>MBeanRegistration</CODE> interface) method of the MBean throws a * <CODE>RuntimeException</CODE>, the <CODE>registerMBean</CODE> method will * throw a <CODE>RuntimeMBeanException</CODE>, although the MBean * registration succeeded. In such a case, the MBean will be actually * registered even though the <CODE>registerMBean</CODE> method * threw an exception. Note that <CODE>RuntimeMBeanException</CODE> can * also be thrown by <CODE>preRegister</CODE>, in which case the MBean * will not be registered. * @exception RuntimeErrorException If the <CODE>postRegister</CODE> * (<CODE>MBeanRegistration</CODE> interface) method of the MBean throws an * <CODE>Error</CODE>, the <CODE>registerMBean</CODE> method will * throw a <CODE>RuntimeErrorException</CODE>, although the MBean * registration succeeded. 
In such a case, the MBean will be actually * registered even though the <CODE>registerMBean</CODE> method * threw an exception. Note that <CODE>RuntimeErrorException</CODE> can * also be thrown by <CODE>preRegister</CODE>, in which case the MBean * will not be registered. * @exception NotCompliantMBeanException This object is not a JMX * compliant MBean * @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The object * passed in parameter is null or no object name is specified. * @see javax.management.MBeanRegistration */ public ObjectInstance registerMBean(Object object, ObjectName name) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException; /** * {@inheritDoc} * * <p>If this method successfully unregisters an MBean, a notification * is sent as described <a href="#notif">above</a>.</p> * * @throws RuntimeOperationsException {@inheritDoc} * @throws RuntimeMBeanException {@inheritDoc} * @throws RuntimeErrorException {@inheritDoc} */ public void unregisterMBean(ObjectName name) throws InstanceNotFoundException, MBeanRegistrationException; // doc comment inherited from MBeanServerConnection public ObjectInstance getObjectInstance(ObjectName name) throws InstanceNotFoundException; /** * {@inheritDoc} * @throws RuntimeOperationsException {@inheritDoc} */ public Set<ObjectInstance> queryMBeans(ObjectName name, QueryExp query); /** * {@inheritDoc} * @throws RuntimeOperationsException {@inheritDoc} */ public Set<ObjectName> queryNames(ObjectName name, QueryExp query); // doc comment inherited from MBeanServerConnection /** * @throws RuntimeOperationsException {@inheritDoc} */ public boolean isRegistered(ObjectName name); /** * Returns the number of MBeans registered in the MBean server. * * @return the number of registered MBeans, wrapped in an Integer. * If the caller's permissions are restricted, this number may * be greater than the number of MBeans the caller can access. 
*/ public Integer getMBeanCount(); // doc comment inherited from MBeanServerConnection /** * @throws RuntimeOperationsException {@inheritDoc} */ public Object getAttribute(ObjectName name, String attribute) throws MBeanException, AttributeNotFoundException, InstanceNotFoundException, ReflectionException; // doc comment inherited from MBeanServerConnection /** * @throws RuntimeOperationsException {@inheritDoc} */ public AttributeList getAttributes(ObjectName name, String[] attributes) throws InstanceNotFoundException, ReflectionException; // doc comment inherited from MBeanServerConnection /** * @throws RuntimeOperationsException {@inheritDoc} */ public void setAttribute(ObjectName name, Attribute attribute) throws InstanceNotFoundException, AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException; // doc comment inherited from MBeanServerConnection /** * @throws RuntimeOperationsException {@inheritDoc} */ public AttributeList setAttributes(ObjectName name, AttributeList attributes) throws InstanceNotFoundException, ReflectionException; // doc comment inherited from MBeanServerConnection public Object invoke(ObjectName name, String operationName, Object params[], String signature[]) throws InstanceNotFoundException, MBeanException, ReflectionException; // doc comment inherited from MBeanServerConnection public String getDefaultDomain(); // doc comment inherited from MBeanServerConnection public String[] getDomains(); // doc comment inherited from MBeanServerConnection, plus: /** * {@inheritDoc} * If the source of the notification * is a reference to an MBean object, the MBean server will replace it * by that MBean's ObjectName. Otherwise the source is unchanged. 
*/ public void addNotificationListener(ObjectName name, NotificationListener listener, NotificationFilter filter, Object handback) throws InstanceNotFoundException; /** * {@inheritDoc} * @throws RuntimeOperationsException {@inheritDoc} */ public void addNotificationListener(ObjectName name, ObjectName listener, NotificationFilter filter, Object handback) throws InstanceNotFoundException; // doc comment inherited from MBeanServerConnection public void removeNotificationListener(ObjectName name, ObjectName listener) throws InstanceNotFoundException, ListenerNotFoundException; // doc comment inherited from MBeanServerConnection public void removeNotificationListener(ObjectName name, ObjectName listener, NotificationFilter filter, Object handback) throws InstanceNotFoundException, ListenerNotFoundException; // doc comment inherited from MBeanServerConnection public void removeNotificationListener(ObjectName name, NotificationListener listener) throws InstanceNotFoundException, ListenerNotFoundException; // doc comment inherited from MBeanServerConnection public void removeNotificationListener(ObjectName name, NotificationListener listener, NotificationFilter filter, Object handback) throws InstanceNotFoundException, ListenerNotFoundException; // doc comment inherited from MBeanServerConnection public MBeanInfo getMBeanInfo(ObjectName name) throws InstanceNotFoundException, IntrospectionException, ReflectionException; // doc comment inherited from MBeanServerConnection public boolean isInstanceOf(ObjectName name, String className) throws InstanceNotFoundException; /** * <p>Instantiates an object using the list of all class loaders * registered in the MBean server's {@link * javax.management.loading.ClassLoaderRepository Class Loader * Repository}. The object's class should have a public * constructor. This method returns a reference to the newly * created object. 
The newly created object is not registered in * the MBean server.</p> * * <p>This method is equivalent to {@link * #instantiate(String,Object[],String[]) * instantiate(className, (Object[]) null, (String[]) null)}.</p> * * @param className The class name of the object to be instantiated. * * @return The newly instantiated object. * * @exception ReflectionException Wraps a * <CODE>java.lang.ClassNotFoundException</CODE> or the * <CODE>java.lang.Exception</CODE> that occurred when trying to * invoke the object's constructor. * @exception MBeanException The constructor of the object has * thrown an exception * @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The className * passed in parameter is null. */ public Object instantiate(String className) throws ReflectionException, MBeanException; /** * <p>Instantiates an object using the class Loader specified by its * <CODE>ObjectName</CODE>. If the loader name is null, the * ClassLoader that loaded the MBean Server will be used. The * object's class should have a public constructor. This method * returns a reference to the newly created object. The newly * created object is not registered in the MBean server.</p> * * <p>This method is equivalent to {@link * #instantiate(String,ObjectName,Object[],String[]) * instantiate(className, loaderName, (Object[]) null, (String[]) * null)}.</p> * * @param className The class name of the MBean to be instantiated. * @param loaderName The object name of the class loader to be used. * * @return The newly instantiated object. * * @exception ReflectionException Wraps a * <CODE>java.lang.ClassNotFoundException</CODE> or the * <CODE>java.lang.Exception</CODE> that occurred when trying to * invoke the object's constructor. * @exception MBeanException The constructor of the object has * thrown an exception. * @exception InstanceNotFoundException The specified class loader * is not registered in the MBeanServer. 
* @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The className * passed in parameter is null. */ public Object instantiate(String className, ObjectName loaderName) throws ReflectionException, MBeanException, InstanceNotFoundException; /** * <p>Instantiates an object using the list of all class loaders * registered in the MBean server {@link * javax.management.loading.ClassLoaderRepository Class Loader * Repository}. The object's class should have a public * constructor. The call returns a reference to the newly created * object. The newly created object is not registered in the * MBean server.</p> * * @param className The class name of the object to be instantiated. * @param params An array containing the parameters of the * constructor to be invoked. * @param signature An array containing the signature of the * constructor to be invoked. * * @return The newly instantiated object. * * @exception ReflectionException Wraps a * <CODE>java.lang.ClassNotFoundException</CODE> or the * <CODE>java.lang.Exception</CODE> that occurred when trying to * invoke the object's constructor. * @exception MBeanException The constructor of the object has * thrown an exception * @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The className * passed in parameter is null. */ public Object instantiate(String className, Object params[], String signature[]) throws ReflectionException, MBeanException; /** * <p>Instantiates an object. The class loader to be used is * identified by its object name. If the object name of the loader * is null, the ClassLoader that loaded the MBean server will be * used. The object's class should have a public constructor. * The call returns a reference to the newly created object. The * newly created object is not registered in the MBean server.</p> * * @param className The class name of the object to be instantiated. 
* @param params An array containing the parameters of the * constructor to be invoked. * @param signature An array containing the signature of the * constructor to be invoked. * @param loaderName The object name of the class loader to be used. * * @return The newly instantiated object. * * @exception ReflectionException Wraps a <CODE>java.lang.ClassNotFoundException</CODE> or the <CODE>java.lang.Exception</CODE> that * occurred when trying to invoke the object's constructor. * @exception MBeanException The constructor of the object has * thrown an exception * @exception InstanceNotFoundException The specified class loader * is not registered in the MBean server. * @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The className * passed in parameter is null. */ public Object instantiate(String className, ObjectName loaderName, Object params[], String signature[]) throws ReflectionException, MBeanException, InstanceNotFoundException; /** * <p>De-serializes a byte array in the context of the class loader * of an MBean.</p> * * @param name The name of the MBean whose class loader should be * used for the de-serialization. * @param data The byte array to be de-serialized. * * @return The de-serialized object stream. * * @exception InstanceNotFoundException The MBean specified is not * found. * @exception OperationsException Any of the usual Input/Output * related exceptions. * * @deprecated Use {@link #getClassLoaderFor getClassLoaderFor} to * obtain the appropriate class loader for deserialization. */ @Deprecated public ObjectInputStream deserialize(ObjectName name, byte[] data) throws InstanceNotFoundException, OperationsException; /** * <p>De-serializes a byte array in the context of a given MBean * class loader. The class loader is found by loading the class * <code>className</code> through the {@link * javax.management.loading.ClassLoaderRepository Class Loader * Repository}. 
The resultant class's class loader is the one to * use. * * @param className The name of the class whose class loader should be * used for the de-serialization. * @param data The byte array to be de-serialized. * * @return The de-serialized object stream. * * @exception OperationsException Any of the usual Input/Output * related exceptions. * @exception ReflectionException The specified class could not be * loaded by the class loader repository * * @deprecated Use {@link #getClassLoaderRepository} to obtain the * class loader repository and use it to deserialize. */ @Deprecated public ObjectInputStream deserialize(String className, byte[] data) throws OperationsException, ReflectionException; /** * <p>De-serializes a byte array in the context of a given MBean * class loader. The class loader is the one that loaded the * class with name "className". The name of the class loader to * be used for loading the specified class is specified. If null, * the MBean Server's class loader will be used.</p> * * @param className The name of the class whose class loader should be * used for the de-serialization. * @param data The byte array to be de-serialized. * @param loaderName The name of the class loader to be used for * loading the specified class. If null, the MBean Server's class * loader will be used. * * @return The de-serialized object stream. * * @exception InstanceNotFoundException The specified class loader * MBean is not found. * @exception OperationsException Any of the usual Input/Output * related exceptions. * @exception ReflectionException The specified class could not be * loaded by the specified class loader. * * @deprecated Use {@link #getClassLoader getClassLoader} to obtain * the class loader for deserialization. 
*/ @Deprecated public ObjectInputStream deserialize(String className, ObjectName loaderName, byte[] data) throws InstanceNotFoundException, OperationsException, ReflectionException; /** * <p>Return the {@link java.lang.ClassLoader} that was used for * loading the class of the named MBean.</p> * * @param mbeanName The ObjectName of the MBean. * * @return The ClassLoader used for that MBean. If <var>l</var> * is the MBean's actual ClassLoader, and <var>r</var> is the * returned value, then either: * * <ul> * <li><var>r</var> is identical to <var>l</var>; or * <li>the result of <var>r</var>{@link * ClassLoader#loadClass(String) .loadClass(<var>s</var>)} is the * same as <var>l</var>{@link ClassLoader#loadClass(String) * .loadClass(<var>s</var>)} for any string <var>s</var>. * </ul> * * What this means is that the ClassLoader may be wrapped in * another ClassLoader for security or other reasons. * * @exception InstanceNotFoundException if the named MBean is not found. * */ public ClassLoader getClassLoaderFor(ObjectName mbeanName) throws InstanceNotFoundException; /** * <p>Return the named {@link java.lang.ClassLoader}.</p> * * @param loaderName The ObjectName of the ClassLoader. May be * null, in which case the MBean server's own ClassLoader is * returned. * * @return The named ClassLoader. If <var>l</var> is the actual * ClassLoader with that name, and <var>r</var> is the returned * value, then either: * * <ul> * <li><var>r</var> is identical to <var>l</var>; or * <li>the result of <var>r</var>{@link * ClassLoader#loadClass(String) .loadClass(<var>s</var>)} is the * same as <var>l</var>{@link ClassLoader#loadClass(String) * .loadClass(<var>s</var>)} for any string <var>s</var>. * </ul> * * What this means is that the ClassLoader may be wrapped in * another ClassLoader for security or other reasons. * * @exception InstanceNotFoundException if the named ClassLoader is * not found. 
* */ public ClassLoader getClassLoader(ObjectName loaderName) throws InstanceNotFoundException; /** * <p>Return the ClassLoaderRepository for this MBeanServer.</p> * @return The ClassLoaderRepository for this MBeanServer. * */ public ClassLoaderRepository getClassLoaderRepository(); }
googleapis/google-cloud-java
34,945
java-tpu/proto-google-cloud-tpu-v2alpha1/src/main/java/com/google/cloud/tpu/v2alpha1/NetworkConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/tpu/v2alpha1/cloud_tpu.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.tpu.v2alpha1; /** * * * <pre> * Network related configurations. * </pre> * * Protobuf type {@code google.cloud.tpu.v2alpha1.NetworkConfig} */ public final class NetworkConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.tpu.v2alpha1.NetworkConfig) NetworkConfigOrBuilder { private static final long serialVersionUID = 0L; // Use NetworkConfig.newBuilder() to construct. 
private NetworkConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private NetworkConfig() { network_ = ""; subnetwork_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new NetworkConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tpu.v2alpha1.CloudTpuProto .internal_static_google_cloud_tpu_v2alpha1_NetworkConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tpu.v2alpha1.CloudTpuProto .internal_static_google_cloud_tpu_v2alpha1_NetworkConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tpu.v2alpha1.NetworkConfig.class, com.google.cloud.tpu.v2alpha1.NetworkConfig.Builder.class); } public static final int NETWORK_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object network_ = ""; /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @return The network. */ @java.lang.Override public java.lang.String getNetwork() { java.lang.Object ref = network_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); network_ = s; return s; } } /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @return The bytes for network. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNetworkBytes() { java.lang.Object ref = network_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); network_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SUBNETWORK_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object subnetwork_ = ""; /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @return The subnetwork. */ @java.lang.Override public java.lang.String getSubnetwork() { java.lang.Object ref = subnetwork_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subnetwork_ = s; return s; } } /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @return The bytes for subnetwork. */ @java.lang.Override public com.google.protobuf.ByteString getSubnetworkBytes() { java.lang.Object ref = subnetwork_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subnetwork_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENABLE_EXTERNAL_IPS_FIELD_NUMBER = 3; private boolean enableExternalIps_ = false; /** * * * <pre> * Indicates that external IP addresses would be associated with the TPU * workers. If set to false, the specified subnetwork or network should have * Private Google Access enabled. 
* </pre> * * <code>bool enable_external_ips = 3;</code> * * @return The enableExternalIps. */ @java.lang.Override public boolean getEnableExternalIps() { return enableExternalIps_; } public static final int CAN_IP_FORWARD_FIELD_NUMBER = 4; private boolean canIpForward_ = false; /** * * * <pre> * Allows the TPU node to send and receive packets with non-matching * destination or source IPs. This is required if you plan to use the TPU * workers to forward routes. * </pre> * * <code>bool can_ip_forward = 4;</code> * * @return The canIpForward. */ @java.lang.Override public boolean getCanIpForward() { return canIpForward_; } public static final int QUEUE_COUNT_FIELD_NUMBER = 6; private int queueCount_ = 0; /** * * * <pre> * Optional. Specifies networking queue count for TPU VM instance's network * interface. * </pre> * * <code>int32 queue_count = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The queueCount. */ @java.lang.Override public int getQueueCount() { return queueCount_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(network_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, network_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, subnetwork_); } if (enableExternalIps_ != false) { output.writeBool(3, enableExternalIps_); } if (canIpForward_ != false) { output.writeBool(4, canIpForward_); } if (queueCount_ != 0) { output.writeInt32(6, queueCount_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = 
memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(network_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, network_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, subnetwork_); } if (enableExternalIps_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, enableExternalIps_); } if (canIpForward_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, canIpForward_); } if (queueCount_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(6, queueCount_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.tpu.v2alpha1.NetworkConfig)) { return super.equals(obj); } com.google.cloud.tpu.v2alpha1.NetworkConfig other = (com.google.cloud.tpu.v2alpha1.NetworkConfig) obj; if (!getNetwork().equals(other.getNetwork())) return false; if (!getSubnetwork().equals(other.getSubnetwork())) return false; if (getEnableExternalIps() != other.getEnableExternalIps()) return false; if (getCanIpForward() != other.getCanIpForward()) return false; if (getQueueCount() != other.getQueueCount()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NETWORK_FIELD_NUMBER; hash = (53 * hash) + getNetwork().hashCode(); hash = (37 * hash) + SUBNETWORK_FIELD_NUMBER; hash = (53 * hash) + getSubnetwork().hashCode(); hash = (37 * hash) + ENABLE_EXTERNAL_IPS_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableExternalIps()); 
hash = (37 * hash) + CAN_IP_FORWARD_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getCanIpForward()); hash = (37 * hash) + QUEUE_COUNT_FIELD_NUMBER; hash = (53 * hash) + getQueueCount(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.tpu.v2alpha1.NetworkConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Network related configurations. 
* </pre> * * Protobuf type {@code google.cloud.tpu.v2alpha1.NetworkConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.tpu.v2alpha1.NetworkConfig) com.google.cloud.tpu.v2alpha1.NetworkConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.tpu.v2alpha1.CloudTpuProto .internal_static_google_cloud_tpu_v2alpha1_NetworkConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.tpu.v2alpha1.CloudTpuProto .internal_static_google_cloud_tpu_v2alpha1_NetworkConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.tpu.v2alpha1.NetworkConfig.class, com.google.cloud.tpu.v2alpha1.NetworkConfig.Builder.class); } // Construct using com.google.cloud.tpu.v2alpha1.NetworkConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; network_ = ""; subnetwork_ = ""; enableExternalIps_ = false; canIpForward_ = false; queueCount_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.tpu.v2alpha1.CloudTpuProto .internal_static_google_cloud_tpu_v2alpha1_NetworkConfig_descriptor; } @java.lang.Override public com.google.cloud.tpu.v2alpha1.NetworkConfig getDefaultInstanceForType() { return com.google.cloud.tpu.v2alpha1.NetworkConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.tpu.v2alpha1.NetworkConfig build() { com.google.cloud.tpu.v2alpha1.NetworkConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.tpu.v2alpha1.NetworkConfig buildPartial() { com.google.cloud.tpu.v2alpha1.NetworkConfig result = new com.google.cloud.tpu.v2alpha1.NetworkConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.tpu.v2alpha1.NetworkConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.network_ = network_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.subnetwork_ = subnetwork_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.enableExternalIps_ = enableExternalIps_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.canIpForward_ = canIpForward_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.queueCount_ = queueCount_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.tpu.v2alpha1.NetworkConfig) { return mergeFrom((com.google.cloud.tpu.v2alpha1.NetworkConfig) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.cloud.tpu.v2alpha1.NetworkConfig other) { if (other == com.google.cloud.tpu.v2alpha1.NetworkConfig.getDefaultInstance()) return this; if (!other.getNetwork().isEmpty()) { network_ = other.network_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getSubnetwork().isEmpty()) { subnetwork_ = other.subnetwork_; bitField0_ |= 0x00000002; onChanged(); } if (other.getEnableExternalIps() != false) { setEnableExternalIps(other.getEnableExternalIps()); } if (other.getCanIpForward() != false) { setCanIpForward(other.getCanIpForward()); } if (other.getQueueCount() != 0) { setQueueCount(other.getQueueCount()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { network_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { subnetwork_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { enableExternalIps_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { canIpForward_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 case 48: { queueCount_ = input.readInt32(); bitField0_ |= 0x00000010; break; } // case 48 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private 
java.lang.Object network_ = ""; /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @return The network. */ public java.lang.String getNetwork() { java.lang.Object ref = network_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); network_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @return The bytes for network. */ public com.google.protobuf.ByteString getNetworkBytes() { java.lang.Object ref = network_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); network_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @param value The network to set. * @return This builder for chaining. */ public Builder setNetwork(java.lang.String value) { if (value == null) { throw new NullPointerException(); } network_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @return This builder for chaining. 
*/ public Builder clearNetwork() { network_ = getDefaultInstance().getNetwork(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The name of the network for the TPU node. It must be a preexisting Google * Compute Engine network. If none is provided, "default" will be used. * </pre> * * <code>string network = 1;</code> * * @param value The bytes for network to set. * @return This builder for chaining. */ public Builder setNetworkBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); network_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object subnetwork_ = ""; /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @return The subnetwork. */ public java.lang.String getSubnetwork() { java.lang.Object ref = subnetwork_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subnetwork_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @return The bytes for subnetwork. */ public com.google.protobuf.ByteString getSubnetworkBytes() { java.lang.Object ref = subnetwork_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subnetwork_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. 
If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @param value The subnetwork to set. * @return This builder for chaining. */ public Builder setSubnetwork(java.lang.String value) { if (value == null) { throw new NullPointerException(); } subnetwork_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @return This builder for chaining. */ public Builder clearSubnetwork() { subnetwork_ = getDefaultInstance().getSubnetwork(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The name of the subnetwork for the TPU node. It must be a preexisting * Google Compute Engine subnetwork. If none is provided, "default" will be * used. * </pre> * * <code>string subnetwork = 2;</code> * * @param value The bytes for subnetwork to set. * @return This builder for chaining. */ public Builder setSubnetworkBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); subnetwork_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean enableExternalIps_; /** * * * <pre> * Indicates that external IP addresses would be associated with the TPU * workers. If set to false, the specified subnetwork or network should have * Private Google Access enabled. * </pre> * * <code>bool enable_external_ips = 3;</code> * * @return The enableExternalIps. */ @java.lang.Override public boolean getEnableExternalIps() { return enableExternalIps_; } /** * * * <pre> * Indicates that external IP addresses would be associated with the TPU * workers. If set to false, the specified subnetwork or network should have * Private Google Access enabled. 
* </pre> * * <code>bool enable_external_ips = 3;</code> * * @param value The enableExternalIps to set. * @return This builder for chaining. */ public Builder setEnableExternalIps(boolean value) { enableExternalIps_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Indicates that external IP addresses would be associated with the TPU * workers. If set to false, the specified subnetwork or network should have * Private Google Access enabled. * </pre> * * <code>bool enable_external_ips = 3;</code> * * @return This builder for chaining. */ public Builder clearEnableExternalIps() { bitField0_ = (bitField0_ & ~0x00000004); enableExternalIps_ = false; onChanged(); return this; } private boolean canIpForward_; /** * * * <pre> * Allows the TPU node to send and receive packets with non-matching * destination or source IPs. This is required if you plan to use the TPU * workers to forward routes. * </pre> * * <code>bool can_ip_forward = 4;</code> * * @return The canIpForward. */ @java.lang.Override public boolean getCanIpForward() { return canIpForward_; } /** * * * <pre> * Allows the TPU node to send and receive packets with non-matching * destination or source IPs. This is required if you plan to use the TPU * workers to forward routes. * </pre> * * <code>bool can_ip_forward = 4;</code> * * @param value The canIpForward to set. * @return This builder for chaining. */ public Builder setCanIpForward(boolean value) { canIpForward_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Allows the TPU node to send and receive packets with non-matching * destination or source IPs. This is required if you plan to use the TPU * workers to forward routes. * </pre> * * <code>bool can_ip_forward = 4;</code> * * @return This builder for chaining. */ public Builder clearCanIpForward() { bitField0_ = (bitField0_ & ~0x00000008); canIpForward_ = false; onChanged(); return this; } private int queueCount_; /** * * * <pre> * Optional. 
Specifies networking queue count for TPU VM instance's network * interface. * </pre> * * <code>int32 queue_count = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The queueCount. */ @java.lang.Override public int getQueueCount() { return queueCount_; } /** * * * <pre> * Optional. Specifies networking queue count for TPU VM instance's network * interface. * </pre> * * <code>int32 queue_count = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The queueCount to set. * @return This builder for chaining. */ public Builder setQueueCount(int value) { queueCount_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Optional. Specifies networking queue count for TPU VM instance's network * interface. * </pre> * * <code>int32 queue_count = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearQueueCount() { bitField0_ = (bitField0_ & ~0x00000010); queueCount_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.tpu.v2alpha1.NetworkConfig) } // @@protoc_insertion_point(class_scope:google.cloud.tpu.v2alpha1.NetworkConfig) private static final com.google.cloud.tpu.v2alpha1.NetworkConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.tpu.v2alpha1.NetworkConfig(); } public static com.google.cloud.tpu.v2alpha1.NetworkConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<NetworkConfig> PARSER = new com.google.protobuf.AbstractParser<NetworkConfig>() { @java.lang.Override public NetworkConfig parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<NetworkConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<NetworkConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.tpu.v2alpha1.NetworkConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,023
java-retail/google-cloud-retail/src/test/java/com/google/cloud/retail/v2beta/CatalogServiceClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.retail.v2beta; import static com.google.cloud.retail.v2beta.CatalogServiceClient.ListCatalogsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class CatalogServiceClientTest { private static MockCatalogService mockCatalogService; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private CatalogServiceClient client; 
@BeforeClass public static void startStaticServer() { mockCatalogService = new MockCatalogService(); mockLocations = new MockLocations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockCatalogService, mockLocations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); CatalogServiceSettings settings = CatalogServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = CatalogServiceClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void listCatalogsTest() throws Exception { Catalog responsesElement = Catalog.newBuilder().build(); ListCatalogsResponse expectedResponse = ListCatalogsResponse.newBuilder() .setNextPageToken("") .addAllCatalogs(Arrays.asList(responsesElement)) .build(); mockCatalogService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCatalogsExceptionTest() throws Exception { StatusRuntimeException 
exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listCatalogs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listCatalogsTest2() throws Exception { Catalog responsesElement = Catalog.newBuilder().build(); ListCatalogsResponse expectedResponse = ListCatalogsResponse.newBuilder() .setNextPageToken("") .addAllCatalogs(Arrays.asList(responsesElement)) .build(); mockCatalogService.addResponse(expectedResponse); String parent = "parent-995424086"; ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCatalogsExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String parent = "parent-995424086"; client.listCatalogs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateCatalogTest() throws Exception { Catalog expectedResponse = Catalog.newBuilder() .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setDisplayName("displayName1714148973") .setProductLevelConfig(ProductLevelConfig.newBuilder().build()) .setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build()) .build(); mockCatalogService.addResponse(expectedResponse); Catalog catalog = Catalog.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); Catalog actualResponse = client.updateCatalog(catalog, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateCatalogRequest actualRequest = ((UpdateCatalogRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateCatalogExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { Catalog catalog = Catalog.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateCatalog(catalog, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setDefaultBranchTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockCatalogService.addResponse(expectedResponse); CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.setDefaultBranch(catalog); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetDefaultBranchRequest actualRequest = ((SetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog.toString(), actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setDefaultBranchExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.setDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setDefaultBranchTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockCatalogService.addResponse(expectedResponse); String catalog = "catalog555704345"; client.setDefaultBranch(catalog); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetDefaultBranchRequest actualRequest = ((SetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setDefaultBranchExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String catalog = "catalog555704345"; client.setDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDefaultBranchTest() throws Exception { GetDefaultBranchResponse expectedResponse = GetDefaultBranchResponse.newBuilder() .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString()) .setSetTime(Timestamp.newBuilder().build()) .setNote("note3387378") .build(); mockCatalogService.addResponse(expectedResponse); CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetDefaultBranchRequest actualRequest = ((GetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog.toString(), actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getDefaultBranchExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDefaultBranchTest2() throws Exception { GetDefaultBranchResponse expectedResponse = GetDefaultBranchResponse.newBuilder() .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString()) .setSetTime(Timestamp.newBuilder().build()) .setNote("note3387378") .build(); mockCatalogService.addResponse(expectedResponse); String catalog = "catalog555704345"; GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetDefaultBranchRequest actualRequest = ((GetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getDefaultBranchExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String catalog = "catalog555704345"; client.getDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getCompletionConfigTest() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); CompletionConfig actualResponse = client.getCompletionConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCompletionConfigRequest actualRequest = ((GetCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCompletionConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getCompletionConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getCompletionConfigTest2() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); String name = "name3373707"; CompletionConfig actualResponse = client.getCompletionConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCompletionConfigRequest actualRequest = ((GetCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCompletionConfigExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String name = "name3373707"; client.getCompletionConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateCompletionConfigTest() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); CompletionConfig completionConfig = CompletionConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); CompletionConfig actualResponse = client.updateCompletionConfig(completionConfig, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateCompletionConfigRequest actualRequest = ((UpdateCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(completionConfig, actualRequest.getCompletionConfig()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateCompletionConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CompletionConfig completionConfig = CompletionConfig.newBuilder().build(); FieldMask updateMask = 
FieldMask.newBuilder().build(); client.updateCompletionConfig(completionConfig, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getAttributesConfigTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); AttributesConfig actualResponse = client.getAttributesConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAttributesConfigRequest actualRequest = ((GetAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAttributesConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getAttributesConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getAttributesConfigTest2() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); String name = "name3373707"; AttributesConfig actualResponse = client.getAttributesConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAttributesConfigRequest actualRequest = ((GetAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAttributesConfigExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String name = "name3373707"; client.getAttributesConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateAttributesConfigTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AttributesConfig attributesConfig = AttributesConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); AttributesConfig actualResponse = client.updateAttributesConfig(attributesConfig, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateAttributesConfigRequest actualRequest = ((UpdateAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(attributesConfig, actualRequest.getAttributesConfig()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateAttributesConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AttributesConfig attributesConfig = AttributesConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateAttributesConfig(attributesConfig, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void addCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AddCatalogAttributeRequest request = AddCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .build(); AttributesConfig actualResponse = client.addCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); AddCatalogAttributeRequest actualRequest = ((AddCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getCatalogAttribute(), actualRequest.getCatalogAttribute()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void addCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AddCatalogAttributeRequest request = AddCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .build(); client.addCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void removeCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); RemoveCatalogAttributeRequest request = RemoveCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setKey("key106079") .build(); AttributesConfig actualResponse = client.removeCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); RemoveCatalogAttributeRequest actualRequest = ((RemoveCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getKey(), actualRequest.getKey()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void removeCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { RemoveCatalogAttributeRequest request = RemoveCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setKey("key106079") .build(); client.removeCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void batchRemoveCatalogAttributesTest() throws Exception { BatchRemoveCatalogAttributesResponse expectedResponse = BatchRemoveCatalogAttributesResponse.newBuilder() .addAllDeletedCatalogAttributes(new ArrayList<String>()) .addAllResetCatalogAttributes(new ArrayList<String>()) .build(); mockCatalogService.addResponse(expectedResponse); BatchRemoveCatalogAttributesRequest request = BatchRemoveCatalogAttributesRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .addAllAttributeKeys(new ArrayList<String>()) .build(); BatchRemoveCatalogAttributesResponse actualResponse = client.batchRemoveCatalogAttributes(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); BatchRemoveCatalogAttributesRequest actualRequest = ((BatchRemoveCatalogAttributesRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getAttributeKeysList(), actualRequest.getAttributeKeysList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void batchRemoveCatalogAttributesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { BatchRemoveCatalogAttributesRequest request = BatchRemoveCatalogAttributesRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .addAllAttributeKeys(new ArrayList<String>()) .build(); client.batchRemoveCatalogAttributes(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void replaceCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); ReplaceCatalogAttributeRequest request = ReplaceCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); AttributesConfig actualResponse = client.replaceCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ReplaceCatalogAttributeRequest actualRequest = ((ReplaceCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getCatalogAttribute(), actualRequest.getCatalogAttribute()); Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void replaceCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { ReplaceCatalogAttributeRequest request = ReplaceCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) 
.build(); client.replaceCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/sdk-platform-java
34,966
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/ListUsersResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: schema/google/showcase/v1beta1/identity.proto // Protobuf Java Version: 3.25.8 package com.google.showcase.v1beta1; /** * * * <pre> * The response message for the google.showcase.v1beta1.Identity&#92;ListUsers * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.ListUsersResponse} */ public final class ListUsersResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.ListUsersResponse) ListUsersResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListUsersResponse.newBuilder() to construct. 
private ListUsersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListUsersResponse() { users_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListUsersResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.IdentityOuterClass .internal_static_google_showcase_v1beta1_ListUsersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.IdentityOuterClass .internal_static_google_showcase_v1beta1_ListUsersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.ListUsersResponse.class, com.google.showcase.v1beta1.ListUsersResponse.Builder.class); } public static final int USERS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.showcase.v1beta1.User> users_; /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ @java.lang.Override public java.util.List<com.google.showcase.v1beta1.User> getUsersList() { return users_; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.showcase.v1beta1.UserOrBuilder> getUsersOrBuilderList() { return users_; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ @java.lang.Override public int getUsersCount() { return users_.size(); } /** * * * <pre> * The list of users. 
* </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.User getUsers(int index) { return users_.get(index); } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.UserOrBuilder getUsersOrBuilder(int index) { return users_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < users_.size(); i++) { output.writeMessage(1, users_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < users_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, users_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.showcase.v1beta1.ListUsersResponse)) { return super.equals(obj); } com.google.showcase.v1beta1.ListUsersResponse other = (com.google.showcase.v1beta1.ListUsersResponse) obj; if (!getUsersList().equals(other.getUsersList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getUsersCount() > 0) { hash = (37 * hash) + USERS_FIELD_NUMBER; hash = (53 * hash) + getUsersList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListUsersResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.ListUsersResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListUsersResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListUsersResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.showcase.v1beta1.ListUsersResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the google.showcase.v1beta1.Identity&#92;ListUsers * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.ListUsersResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.ListUsersResponse) com.google.showcase.v1beta1.ListUsersResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.IdentityOuterClass .internal_static_google_showcase_v1beta1_ListUsersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.IdentityOuterClass .internal_static_google_showcase_v1beta1_ListUsersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.ListUsersResponse.class, com.google.showcase.v1beta1.ListUsersResponse.Builder.class); } // Construct using com.google.showcase.v1beta1.ListUsersResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (usersBuilder_ == null) { users_ = java.util.Collections.emptyList(); } else { users_ = null; usersBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.showcase.v1beta1.IdentityOuterClass .internal_static_google_showcase_v1beta1_ListUsersResponse_descriptor; } @java.lang.Override public 
com.google.showcase.v1beta1.ListUsersResponse getDefaultInstanceForType() { return com.google.showcase.v1beta1.ListUsersResponse.getDefaultInstance(); } @java.lang.Override public com.google.showcase.v1beta1.ListUsersResponse build() { com.google.showcase.v1beta1.ListUsersResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.showcase.v1beta1.ListUsersResponse buildPartial() { com.google.showcase.v1beta1.ListUsersResponse result = new com.google.showcase.v1beta1.ListUsersResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.showcase.v1beta1.ListUsersResponse result) { if (usersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { users_ = java.util.Collections.unmodifiableList(users_); bitField0_ = (bitField0_ & ~0x00000001); } result.users_ = users_; } else { result.users_ = usersBuilder_.build(); } } private void buildPartial0(com.google.showcase.v1beta1.ListUsersResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public 
Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.showcase.v1beta1.ListUsersResponse) { return mergeFrom((com.google.showcase.v1beta1.ListUsersResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.showcase.v1beta1.ListUsersResponse other) { if (other == com.google.showcase.v1beta1.ListUsersResponse.getDefaultInstance()) return this; if (usersBuilder_ == null) { if (!other.users_.isEmpty()) { if (users_.isEmpty()) { users_ = other.users_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureUsersIsMutable(); users_.addAll(other.users_); } onChanged(); } } else { if (!other.users_.isEmpty()) { if (usersBuilder_.isEmpty()) { usersBuilder_.dispose(); usersBuilder_ = null; users_ = other.users_; bitField0_ = (bitField0_ & ~0x00000001); usersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getUsersFieldBuilder() : null; } else { usersBuilder_.addAllMessages(other.users_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.showcase.v1beta1.User m = input.readMessage(com.google.showcase.v1beta1.User.parser(), extensionRegistry); if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(m); } else { usersBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.showcase.v1beta1.User> users_ = java.util.Collections.emptyList(); private void ensureUsersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { users_ = new java.util.ArrayList<com.google.showcase.v1beta1.User>(users_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.User, com.google.showcase.v1beta1.User.Builder, com.google.showcase.v1beta1.UserOrBuilder> usersBuilder_; /** * * * <pre> * The list of users. 
* </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.User> getUsersList() { if (usersBuilder_ == null) { return java.util.Collections.unmodifiableList(users_); } else { return usersBuilder_.getMessageList(); } } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public int getUsersCount() { if (usersBuilder_ == null) { return users_.size(); } else { return usersBuilder_.getCount(); } } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public com.google.showcase.v1beta1.User getUsers(int index) { if (usersBuilder_ == null) { return users_.get(index); } else { return usersBuilder_.getMessage(index); } } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder setUsers(int index, com.google.showcase.v1beta1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.set(index, value); onChanged(); } else { usersBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder setUsers(int index, com.google.showcase.v1beta1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.set(index, builderForValue.build()); onChanged(); } else { usersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of users. 
* </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder addUsers(com.google.showcase.v1beta1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.add(value); onChanged(); } else { usersBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder addUsers(int index, com.google.showcase.v1beta1.User value) { if (usersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureUsersIsMutable(); users_.add(index, value); onChanged(); } else { usersBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder addUsers(com.google.showcase.v1beta1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(builderForValue.build()); onChanged(); } else { usersBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder addUsers(int index, com.google.showcase.v1beta1.User.Builder builderForValue) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.add(index, builderForValue.build()); onChanged(); } else { usersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder addAllUsers( java.lang.Iterable<? extends com.google.showcase.v1beta1.User> values) { if (usersBuilder_ == null) { ensureUsersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, users_); onChanged(); } else { usersBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of users. 
* </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder clearUsers() { if (usersBuilder_ == null) { users_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { usersBuilder_.clear(); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public Builder removeUsers(int index) { if (usersBuilder_ == null) { ensureUsersIsMutable(); users_.remove(index); onChanged(); } else { usersBuilder_.remove(index); } return this; } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public com.google.showcase.v1beta1.User.Builder getUsersBuilder(int index) { return getUsersFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public com.google.showcase.v1beta1.UserOrBuilder getUsersOrBuilder(int index) { if (usersBuilder_ == null) { return users_.get(index); } else { return usersBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public java.util.List<? extends com.google.showcase.v1beta1.UserOrBuilder> getUsersOrBuilderList() { if (usersBuilder_ != null) { return usersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(users_); } } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public com.google.showcase.v1beta1.User.Builder addUsersBuilder() { return getUsersFieldBuilder() .addBuilder(com.google.showcase.v1beta1.User.getDefaultInstance()); } /** * * * <pre> * The list of users. 
* </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public com.google.showcase.v1beta1.User.Builder addUsersBuilder(int index) { return getUsersFieldBuilder() .addBuilder(index, com.google.showcase.v1beta1.User.getDefaultInstance()); } /** * * * <pre> * The list of users. * </pre> * * <code>repeated .google.showcase.v1beta1.User users = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.User.Builder> getUsersBuilderList() { return getUsersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.User, com.google.showcase.v1beta1.User.Builder, com.google.showcase.v1beta1.UserOrBuilder> getUsersFieldBuilder() { if (usersBuilder_ == null) { usersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.User, com.google.showcase.v1beta1.User.Builder, com.google.showcase.v1beta1.UserOrBuilder>( users_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); users_ = null; } return usersBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListUsersRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Message&#92;ListUsers` method to retrieve the * next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.ListUsersResponse) } // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.ListUsersResponse) private static final com.google.showcase.v1beta1.ListUsersResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.showcase.v1beta1.ListUsersResponse(); } public static com.google.showcase.v1beta1.ListUsersResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListUsersResponse> PARSER = new com.google.protobuf.AbstractParser<ListUsersResponse>() { @java.lang.Override public ListUsersResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListUsersResponse> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListUsersResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.showcase.v1beta1.ListUsersResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/pulsar
35,275
pulsar-broker/src/test/java/org/apache/pulsar/broker/cache/BrokerEntryCacheTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.cache; import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import lombok.Cleanup; import lombok.extern.slf4j.Slf4j; import org.apache.bookkeeper.client.api.LedgerEntries; import org.apache.bookkeeper.mledger.ManagedLedgerFactoryMXBean; import org.apache.bookkeeper.mledger.impl.cache.RangeCacheTestUtil; import org.apache.commons.lang3.mutable.MutableInt; import org.apache.pulsar.broker.BrokerTestUtil; import org.apache.pulsar.broker.ServiceConfiguration; import org.apache.pulsar.broker.service.Ipv4Proxy; import org.apache.pulsar.client.api.Consumer; import org.apache.pulsar.client.api.KeySharedPolicy; import org.apache.pulsar.client.api.Message; import org.apache.pulsar.client.api.MessageListener; import 
org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.SubscriptionInitialPosition;
import org.apache.pulsar.client.api.SubscriptionType;
import org.apache.pulsar.common.util.FutureUtil;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

/**
 * Some end-to-end test for Broker cache.
 * The tests disabled by default are exploratory in nature and are not intended to be run as part of the
 * regular test suite.
 */
@Test(groups = "broker-api")
@Slf4j
public class BrokerEntryCacheTest extends ProducerConsumerBase {
    // Counts reads that reach (mock) BookKeeper; reset per test in setup().
    // A value of 0 after a test means every read was served from the broker cache.
    AtomicInteger bkReadCount = new AtomicInteger(0);

    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        this.conf.setClusterName("test");
        internalSetup();
        producerBaseSetup();
        bkReadCount.set(0);
        // Intercept BookKeeper reads so tests can assert how often the cache was bypassed.
        pulsarTestContext.getMockBookKeeper()
                .setReadHandleInterceptor((long ledgerId, long firstEntry, long lastEntry, LedgerEntries entries) -> {
                    bkReadCount.incrementAndGet();
                    return CompletableFuture.completedFuture(entries);
                });
    }

    @AfterMethod(alwaysRun = true)
    @Override
    protected void cleanup() throws Exception {
        internalCleanup();
    }

    @Override
    protected ServiceConfiguration getDefaultConf() {
        ServiceConfiguration defaultConf = super.getDefaultConf();
        // use cache eviction by expected read count, this is the default behavior so it's not necessary to set it,
        // but it makes the test more explicit
        defaultConf.setCacheEvictionByExpectedReadCount(true);
        // LRU cache eviction behavior is enabled by default, but we set it explicitly
        defaultConf.setManagedLedgerCacheEvictionExtendTTLOfRecentlyAccessed(true);
        // while performing exploratory testing,
        // uncomment one or many of these to compare with existing caching behavior
        //defaultConf.setManagedLedgerCacheEvictionExtendTTLOfRecentlyAccessed(false);
        //defaultConf.setCacheEvictionByExpectedReadCount(false);
        //configurePR12258Caching(defaultConf);
        //configureCacheEvictionByMarkDeletedPosition(defaultConf);
        return defaultConf;
    }

    /**
     * Configures ServiceConfiguration for cache eviction based on the slowest markDeletedPosition.
     * This method disables cache eviction by expected read count and enables eviction by markDeletedPosition.
     * Related to https://github.com/apache/pulsar/pull/14985 - "Evicting cache data by the slowest markDeletedPosition"
     *
     * @param defaultConf ServiceConfiguration instance to be configured
     */
    private static void configureCacheEvictionByMarkDeletedPosition(ServiceConfiguration defaultConf) {
        defaultConf.setCacheEvictionByExpectedReadCount(false);
        defaultConf.setCacheEvictionByMarkDeletedPosition(true);
    }

    /**
     * Configures ServiceConfiguration with settings to test PR12258 behavior for caching to drain backlog consumers.
     * This method sets configurations to enable caching for cursors with backlogged messages.
     * To make PR12258 effective, there's an additional change made in the broker codebase to
     * activate the cursor when a consumer connects, instead of waiting for the scheduled task to activate it.
     * Check org.apache.pulsar.broker.service.persistent.PersistentSubscription#addConsumerInternal method
     * for the change.
     *
     * @param defaultConf ServiceConfiguration instance to be modified
     */
    private static void configurePR12258Caching(ServiceConfiguration defaultConf) {
        defaultConf.setCacheEvictionByExpectedReadCount(false);
        defaultConf.setManagedLedgerMinimumBacklogCursorsForCaching(1);
        defaultConf.setManagedLedgerMinimumBacklogEntriesForCaching(1);
        defaultConf.setManagedLedgerMaxBacklogBetweenCursorsForCaching(Integer.MAX_VALUE);
        defaultConf.setManagedLedgerCursorBackloggedThreshold(Long.MAX_VALUE);
    }

    /**
     * Exploratory test: tailing reads with Key_Shared subscriptions where consumers are
     * occasionally slow, using a deliberately small cache. Verifies cache hit/miss counters
     * move and stay within sane bounds.
     */
    // change enabled to true to run the test
    @Test(enabled = false)
    public void testTailingReadsKeySharedSlowConsumer() throws Exception {
        final String topicName = "persistent://my-property/my-ns/cache-test-topic";
        final String subscriptionName = "test-subscription";
        final int numConsumers = 10;
        final int numSubscriptions = numConsumers / 2;
        final int messagesPerSecond = 300 / numSubscriptions;
        final int testDurationSeconds = 10;
        final int numberOfKeys = numConsumers * 10;
        final int totalMessages = messagesPerSecond * testDurationSeconds;
        final long sizePerEntry = 68L;
        final int slowConsumerMessageProbability = 5; // 5% chance
        final int maxPauseForSlowConsumerMessageInMs = 500; // 1-500 ms pause

        // limit the cache size to a relatively small size (about 0.6 seconds of messages)
        pulsar.getDefaultManagedLedgerFactory().getEntryCacheManager()
                .updateCacheSizeAndThreshold((long) (1.1d * messagesPerSecond) * sizePerEntry);

        @Cleanup
        Producer<Long> producer = pulsarClient.newProducer(Schema.INT64)
                .topic(topicName)
                .enableBatching(false)
                .blockIfQueueFull(true)
                .create();

        // Create consumers on the tail (reading from latest)
        Consumer<Long>[] consumers = new Consumer[numConsumers];
        for (int i = 0; i < numConsumers; i++) {
            consumers[i] = pulsarClient.newConsumer(Schema.INT64)
                    .topic(topicName)
                    .receiverQueueSize(10)
                    .subscriptionName(subscriptionName + "-" + (i % numSubscriptions))
                    .subscriptionType(SubscriptionType.Key_Shared)
                    .subscriptionInitialPosition(SubscriptionInitialPosition.Latest)
                    .keySharedPolicy(KeySharedPolicy.autoSplitHashRange())
                    .subscribe();
        }

        ManagedLedgerFactoryMXBean cacheStats = pulsar.getDefaultManagedLedgerFactory().getCacheStats();
        // Record initial cache metrics
        long initialCacheHits = cacheStats.getCacheHitsTotal();
        long initialCacheMisses = cacheStats.getCacheMissesTotal();

        // Start producer thread; paces sends so that roughly messagesPerSecond are produced
        CountDownLatch producerLatch = new CountDownLatch(1);
        Thread producerThread = new Thread(() -> {
            try {
                long startTime = System.currentTimeMillis();
                int messagesSent = 0;
                long messageId = 0;
                while (messagesSent < totalMessages) {
                    long expectedTime = startTime + (messagesSent * 1000L / messagesPerSecond);
                    long currentTime = System.currentTimeMillis();
                    if (currentTime < expectedTime) {
                        Thread.sleep(expectedTime - currentTime);
                    }
                    long value = messageId++;
                    long keyValue = value % numberOfKeys;
                    byte[] keyBytes = new byte[Long.BYTES];
                    ByteBuffer keyBuffer = ByteBuffer.wrap(keyBytes);
                    keyBuffer.putLong(keyValue);
                    producer.newMessage().keyBytes(keyBytes).value(value).send();
                    messagesSent++;
                }
                log.info("Producer finished sending {} messages", messagesSent);
            } catch (Exception e) {
                log.error("Producer error", e);
                fail("Producer failed: " + e.getMessage());
            } finally {
                producerLatch.countDown();
            }
        });

        // Start consumer threads
        CountDownLatch consumersLatch = new CountDownLatch(numConsumers);
        for (int i = 0; i < numConsumers; i++) {
            final int consumerId = i;
            Thread consumerThread = new Thread(() -> {
                Random random = new Random();
                try {
                    int messagesReceived = 0;
                    long startTime = System.currentTimeMillis();
                    while (System.currentTimeMillis() - startTime < (testDurationSeconds + 2) * 1000) {
                        try {
                            Message<Long> message = consumers[consumerId].receive(1000, TimeUnit.MILLISECONDS);
                            if (message != null) {
                                // sleep for a random time with small probability to simulate slow consumer
                                if (random.nextInt(100) < slowConsumerMessageProbability) {
                                    try {
                                        // Simulate slow consumer by sleeping for a random time
                                        Thread.sleep(random.nextInt(maxPauseForSlowConsumerMessageInMs) + 1);
                                    } catch (InterruptedException e) {
                                        Thread.currentThread().interrupt();
                                    }
                                }
                                consumers[consumerId].acknowledge(message);
                                messagesReceived++;
                            }
                        } catch (PulsarClientException.TimeoutException e) {
                            // Expected timeout, continue
                        }
                    }
                    log.info("Consumer {} received {} messages", consumerId, messagesReceived);
                } catch (Exception e) {
                    log.error("Consumer {} error", consumerId, e);
                } finally {
                    consumersLatch.countDown();
                }
            });
            consumerThread.start();
        }

        // Start producer
        producerThread.start();

        // Wait for test completion
        assertTrue(producerLatch.await(testDurationSeconds + 5, TimeUnit.SECONDS),
                "Producer should complete within timeout");
        assertTrue(consumersLatch.await(testDurationSeconds + 10, TimeUnit.SECONDS),
                "Consumers should complete within timeout");

        // Clean up consumers
        for (Consumer<Long> consumer : consumers) {
            consumer.close();
        }

        // Get final cache metrics
        long finalCacheHits = cacheStats.getCacheHitsTotal();
        long finalCacheMisses = cacheStats.getCacheMissesTotal();

        // Calculate metrics similar to testStorageReadCacheMissesRate
        long cacheHitsDelta = finalCacheHits - initialCacheHits;
        long cacheMissesDelta = finalCacheMisses - initialCacheMisses;
        log.info("Cache metrics - Hits: {} -> {} (delta: {}), Misses: {} -> {} (delta: {})",
                initialCacheHits, finalCacheHits, cacheHitsDelta,
                initialCacheMisses, finalCacheMisses, cacheMissesDelta);
        log.info("Bk read count: {}", bkReadCount.get());

        // Verify that cache activity occurred
        assertTrue(cacheHitsDelta + cacheMissesDelta > 0,
                "Expected cache activity (hits or misses) during the test");
        // Verify metrics make sense for the workload
        assertTrue(cacheHitsDelta >= 0, "Cache hits should not decrease");
        assertTrue(cacheMissesDelta >= 0, "Cache misses should not decrease");

        // With multiple consumers reading from the tail, we expect some cache activity
        // The exact ratio depends on cache size and message patterns
        double totalCacheRequests = cacheHitsDelta + cacheMissesDelta;
        if (totalCacheRequests > 0) {
            double cacheHitRate = cacheHitsDelta / totalCacheRequests;
            log.info("Cache hit rate: {}%", String.format("%.2f", cacheHitRate * 100));
            // With tail consumers, we might expect good cache hit rates
            // since recent messages are more likely to be cached
            assertTrue(cacheHitRate >= 0.0 && cacheHitRate <= 1.0,
                    "Cache hit rate should be between 0 and 1");
        }
    }

    /**
     * Exploratory test: catch-up (backlog) reads while a failure proxy repeatedly drops
     * all broker connections, forcing the clients to reconnect mid catch-up. Verifies
     * every consumer still receives the full backlog and that the catch-up phase is
     * served mostly from the cache.
     */
    // change enabled to true to run the test
    @Test(enabled = false)
    public void testCatchUpReadsWithFailureProxyDisconnectingAllConnections() throws Exception {
        final String topicName = "persistent://my-property/my-ns/cache-catchup-test-topic";
        final String subscriptionName = "test-catchup-subscription";
        final int numConsumers = 5;
        final int totalMessages = 1000;
        final int receiverQueueSize = 50;

        // Wire a failure proxy so that it's possible to disconnect broker connections forcefully
        @Cleanup("stop")
        Ipv4Proxy failureProxy = new Ipv4Proxy(0, "localhost", pulsar.getBrokerListenPort().get());
        failureProxy.startup();
        @Cleanup("stop")
        PulsarLookupProxy lookupProxy = new PulsarLookupProxy(0, pulsar.getWebService().getListenPortHTTP().get(),
                pulsar.getBrokerListenPort().get(), failureProxy.getLocalPort());
        @Cleanup
        PulsarClient pulsarClient = PulsarClient.builder()
                .serviceUrl("http://localhost:" + lookupProxy.getBindPort())
                .statsInterval(0, TimeUnit.SECONDS)
                .build();

        @Cleanup
        Producer<Long> producer = pulsarClient.newProducer(Schema.INT64)
                .topic(topicName)
                .enableBatching(false)
                .blockIfQueueFull(true)
                .create();

        // Create consumers in paused state with receiver queue size of 50
        Consumer<Long>[] consumers = new Consumer[numConsumers];
        for (int i = 0; i < numConsumers; i++) {
            consumers[i] = pulsarClient.newConsumer(Schema.INT64)
                    .topic(topicName)
                    .subscriptionName(subscriptionName + "-" + i)
                    .subscriptionType(SubscriptionType.Shared)
                    .subscriptionInitialPosition(SubscriptionInitialPosition.Earliest)
                    .startPaused(true) // start consumers in paused state
                    .receiverQueueSize(receiverQueueSize)
                    .subscribe();
        }

        ManagedLedgerFactoryMXBean cacheStats = pulsar.getDefaultManagedLedgerFactory().getCacheStats();
        // Record initial cache metrics
        long initialCacheHits = cacheStats.getCacheHitsTotal();
        long initialCacheMisses = cacheStats.getCacheMissesTotal();

        // Produce all messages while consumers are paused
        log.info("Starting to produce {} messages", totalMessages);
        for (long messageId = 0; messageId < totalMessages; messageId++) {
            producer.send(messageId);
        }
        log.info("Finished producing {} messages", totalMessages);

        // Record cache metrics after production
        long afterProductionCacheHits = cacheStats.getCacheHitsTotal();
        long afterProductionCacheMisses = cacheStats.getCacheMissesTotal();

        // Unpause all consumers
        for (Consumer<Long> consumer : consumers) {
            consumer.resume();
        }

        // Periodically drop every client connection going through the proxy until interrupted.
        @Cleanup("interrupt")
        Thread failureInjector = new Thread(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    // Simulate a failure by disconnecting the broker connections
                    Thread.sleep(2000); // Wait for some messages to be consumed
                    failureProxy.disconnectFrontChannels();
                    log.info("Injected failure by disconnecting all broker connections");
                } catch (InterruptedException e) {
                    log.info("Failure injector interrupted");
                    Thread.currentThread().interrupt();
                }
            }
        });
        failureInjector.start();

        // Start consumer threads to read the catch-up messages
        CountDownLatch consumersLatch = new CountDownLatch(numConsumers);
        int[] messagesReceivedPerConsumer = new int[numConsumers];
        for (int i = 0; i < numConsumers; i++) {
            final int consumerId = i;
            Thread consumerThread = new Thread(() -> {
                try {
                    long startTime = System.currentTimeMillis();
                    // Deduplicates redeliveries caused by the injected disconnects.
                    ConcurrentHashMap<Long, Long> messagesReceived = new ConcurrentHashMap<>();
                    // Give consumers enough time to catch up
                    while (messagesReceived.size() < totalMessages
                            && System.currentTimeMillis() - startTime < 30000) {
                        try {
                            Message<Long> message = consumers[consumerId].receive(1000, TimeUnit.MILLISECONDS);
                            if (message != null) {
                                // Simulate processing time only for messages actually received;
                                // a receive() timeout has already waited long enough.
                                Thread.sleep(20);
                                long messageId = message.getValue();
                                messagesReceived.put(messageId, messageId);
                                consumers[consumerId].acknowledge(message);
                            }
                        } catch (PulsarClientException.TimeoutException e) {
                            // Continue on timeout
                        }
                    }
                    messagesReceivedPerConsumer[consumerId] = messagesReceived.size();
                    log.info("Consumer {} received {} messages", consumerId, messagesReceived.size());
                } catch (Exception e) {
                    log.error("Consumer {} error", consumerId, e);
                } finally {
                    consumersLatch.countDown();
                }
            });
            consumerThread.start();
        }

        // Wait for all consumers to complete
        assertTrue(consumersLatch.await(60, TimeUnit.SECONDS),
                "All consumers should complete catch-up reads within timeout");
        failureInjector.interrupt();
        failureInjector.join();

        // Clean up consumers
        for (Consumer<Long> consumer : consumers) {
            consumer.close();
        }

        // Get final cache metrics
        long finalCacheHits = cacheStats.getCacheHitsTotal();
        long finalCacheMisses = cacheStats.getCacheMissesTotal();

        // Calculate metrics
        long productionCacheHitsDelta = afterProductionCacheHits - initialCacheHits;
        long productionCacheMissesDelta = afterProductionCacheMisses - initialCacheMisses;
        long consumptionCacheHitsDelta = finalCacheHits - afterProductionCacheHits;
        long consumptionCacheMissesDelta = finalCacheMisses - afterProductionCacheMisses;
        log.info("Production phase - Cache hits delta: {}, Cache misses delta: {}",
                productionCacheHitsDelta, productionCacheMissesDelta);
        log.info("Consumption phase - Cache hits delta: {}, Cache misses delta: {}",
                consumptionCacheHitsDelta, consumptionCacheMissesDelta);
        log.info("Bk read count: {}", bkReadCount.get());

        // Verify all consumers received all messages
        for (int i = 0; i < numConsumers; i++) {
            assertTrue(messagesReceivedPerConsumer[i] == totalMessages,
                    String.format("Consumer %d should receive all %d messages, but received %d",
                            i, totalMessages, messagesReceivedPerConsumer[i]));
        }

        // Verify cache activity occurred during consumption
        assertTrue(consumptionCacheHitsDelta + consumptionCacheMissesDelta > 0,
                "Expected cache activity during catch-up reads");

        // For catch-up reads, we expect minimal cache misses since messages should be cached
        // or efficiently retrieved in sequence
        double totalConsumptionCacheRequests = consumptionCacheHitsDelta + consumptionCacheMissesDelta;
        if (totalConsumptionCacheRequests > 0) {
            double cacheHitRate = consumptionCacheHitsDelta / totalConsumptionCacheRequests;
            log.info("Consumption cache hit rate: {}%", String.format("%.2f", cacheHitRate * 100));
            // For catch-up scenarios, we expect very few cache misses
            assertTrue(consumptionCacheMissesDelta == 0 || cacheHitRate > 0.6,
                    String.format("Expected no cache misses or very high hit rate for catch-up reads. "
                            + "Cache misses: %d, Hit rate: %.2f%%",
                            consumptionCacheMissesDelta, cacheHitRate * 100));
        }

        log.info("Catch-up read test completed successfully with {} consumers and {} messages",
                numConsumers, totalMessages);
    }

    /**
     * Verifies that after tailing reads complete, cached entries carry a zero expected-read
     * count and the whole entry cache drains to zero once the eviction TTL has elapsed.
     */
    @Test
    public void testTailingReadsClearsCacheAfterCacheTimeout() throws Exception {
        final String topicName = "persistent://my-property/my-ns/cache-test-topic";
        final String subscriptionName = "test-subscription";
        final int numConsumers = 5;
        final int messagesPerSecond = 150;
        final int testDurationSeconds = 3;
        final int totalMessages = messagesPerSecond * testDurationSeconds;

        @Cleanup
        Producer<Long> producer = pulsarClient.newProducer(Schema.INT64)
                .topic(topicName)
                .enableBatching(false)
                .blockIfQueueFull(true)
                .create();

        // Create consumers on the tail (reading from latest)
        Consumer<Long>[] consumers = new Consumer[numConsumers];
        for (int i = 0; i < numConsumers; i++) {
            consumers[i] = pulsarClient.newConsumer(Schema.INT64)
                    .topic(topicName)
                    .subscriptionName(subscriptionName + "-" + i)
                    .subscriptionType(SubscriptionType.Exclusive)
                    .subscriptionInitialPosition(SubscriptionInitialPosition.Latest)
                    .subscribe();
        }

        // Start producer thread
        CountDownLatch producerLatch = new CountDownLatch(1);
        Thread producerThread = new Thread(() -> {
            try {
                long startTime = System.currentTimeMillis();
                int messagesSent = 0;
                long messageId = 0;
                while (messagesSent < totalMessages) {
                    long expectedTime = startTime + (messagesSent * 1000L / messagesPerSecond);
                    long currentTime = System.currentTimeMillis();
                    if (currentTime < expectedTime) {
                        Thread.sleep(expectedTime - currentTime);
                    }
                    producer.send(messageId++);
                    messagesSent++;
                }
                log.info("Producer finished sending {} messages", messagesSent);
            } catch (Exception e) {
                log.error("Producer error", e);
                fail("Producer failed: " + e.getMessage());
            } finally {
                producerLatch.countDown();
            }
        });

        // Start consumer threads
        CountDownLatch consumersLatch = new CountDownLatch(numConsumers);
        for (int i = 0; i < numConsumers; i++) {
            final int consumerId = i;
            Thread consumerThread = new Thread(() -> {
                try {
                    int messagesReceived = 0;
                    while (!Thread.currentThread().isInterrupted() && messagesReceived < totalMessages) {
                        try {
                            Message<Long> message = consumers[consumerId].receive(1000, TimeUnit.MILLISECONDS);
                            if (message != null) {
                                consumers[consumerId].acknowledge(message);
                                messagesReceived++;
                            }
                        } catch (PulsarClientException.TimeoutException e) {
                            // Expected timeout, continue
                        }
                    }
                    log.info("Consumer {} received {} messages", consumerId, messagesReceived);
                } catch (Exception e) {
                    log.error("Consumer {} error", consumerId, e);
                } finally {
                    consumersLatch.countDown();
                }
            });
            consumerThread.start();
        }

        // Start producer
        producerThread.start();

        // Wait for test completion
        assertTrue(producerLatch.await(testDurationSeconds + 5, TimeUnit.SECONDS),
                "Producer should complete within timeout");
        assertTrue(consumersLatch.await(testDurationSeconds + 10, TimeUnit.SECONDS),
                "Consumers should complete within timeout");

        // All consumers have read every entry, so no further reads are expected for any cached entry.
        // NOTE: describedAs must come BEFORE the assertion call, otherwise the description is ignored.
        RangeCacheTestUtil.forEachCachedEntry(pulsar, entry -> {
            if (entry.hasExpectedReads()) {
                assertThat(entry.getReadCountHandler().getExpectedReadCount())
                        .describedAs("Expected read count for entry " + entry.getPosition() + " is not zero")
                        .isEqualTo(0);
            }
        });

        // sleep for 3 * cache eviction time threshold to count for TTL
        // and managedLedgerCacheEvictionExtendTTLOfRecentlyAccessed behavior
        Thread.sleep(3 * conf.getManagedLedgerCacheEvictionTimeThresholdMillis());
        assertThat(pulsar.getDefaultManagedLedgerFactory().getEntryCacheManager().getSize()).isEqualTo(0L);

        // Clean up consumers
        for (Consumer<Long> consumer : consumers) {
            consumer.close();
        }
    }

    /**
     * Verifies the expected-read-count bookkeeping: with one of N consumers held back,
     * every cached entry should report exactly one remaining expected read; after the
     * last consumer drains the topic and the eviction TTL passes, the cache is empty.
     */
    @Test
    public void testExpectedReads() throws Exception {
        final String topicName = "persistent://my-property/my-ns/cache-test-topic";
        final String subscriptionName = "test-subscription";
        final int numConsumers = 5;
        final int messagesPerSecond = 150;
        final int testDurationSeconds = 3;
        final int totalMessages = messagesPerSecond * testDurationSeconds;

        @Cleanup
        Producer<Long> producer = pulsarClient.newProducer(Schema.INT64)
                .topic(topicName)
                .enableBatching(false)
                .blockIfQueueFull(true)
                .create();

        // Create consumers on the tail (reading from latest)
        Consumer<Long>[] consumers = new Consumer[numConsumers];
        for (int i = 0; i < numConsumers; i++) {
            consumers[i] = pulsarClient.newConsumer(Schema.INT64)
                    .topic(topicName)
                    .subscriptionName(subscriptionName + "-" + i)
                    .subscriptionType(SubscriptionType.Exclusive)
                    .subscriptionInitialPosition(SubscriptionInitialPosition.Latest)
                    .receiverQueueSize(messagesPerSecond)
                    .subscribe();
        }

        // Start producer thread
        CountDownLatch producerLatch = new CountDownLatch(1);
        Thread producerThread = new Thread(() -> {
            try {
                long startTime = System.currentTimeMillis();
                int messagesSent = 0;
                long messageId = 0;
                while (messagesSent < totalMessages) {
                    long expectedTime = startTime + (messagesSent * 1000L / messagesPerSecond);
                    long currentTime = System.currentTimeMillis();
                    if (currentTime < expectedTime) {
                        Thread.sleep(expectedTime - currentTime);
                    }
                    producer.send(messageId++);
                    messagesSent++;
                }
                log.info("Producer finished sending {} messages", messagesSent);
            } catch (Exception e) {
                log.error("Producer error", e);
                fail("Producer failed: " + e.getMessage());
            } finally {
                producerLatch.countDown();
            }
        });

        // Start consumer threads; the last consumer is deliberately held back so that
        // each cached entry keeps exactly one pending expected read.
        CountDownLatch consumersLatch = new CountDownLatch(numConsumers - 1);
        Thread lastConsumerThread = null;
        for (int i = 0; i < numConsumers; i++) {
            final int consumerId = i;
            Thread consumerThread = new Thread(() -> {
                try {
                    int messagesReceived = 0;
                    while (!Thread.currentThread().isInterrupted() && messagesReceived < totalMessages) {
                        try {
                            Message<Long> message = consumers[consumerId].receive(1000, TimeUnit.MILLISECONDS);
                            if (message != null) {
                                consumers[consumerId].acknowledge(message);
                                messagesReceived++;
                            }
                        } catch (PulsarClientException.TimeoutException e) {
                            // Expected timeout, continue
                        }
                    }
                    log.info("Consumer {} received {} messages", consumerId, messagesReceived);
                } catch (Exception e) {
                    log.error("Consumer {} error", consumerId, e);
                } finally {
                    consumersLatch.countDown();
                }
            });
            if (i != numConsumers - 1) {
                consumerThread.start();
            } else {
                lastConsumerThread = consumerThread;
            }
        }

        // Start producer
        producerThread.start();

        // Wait for test completion
        assertTrue(producerLatch.await(testDurationSeconds + 5, TimeUnit.SECONDS),
                "Producer should complete within timeout");
        assertTrue(consumersLatch.await(testDurationSeconds + 10, TimeUnit.SECONDS),
                "Consumers should complete within timeout");

        // With one consumer still pending, every cached entry should expect exactly one more read.
        // NOTE: describedAs must come BEFORE the assertion call, otherwise the description is ignored.
        MutableInt expectedReadCountWith1 = new MutableInt(0);
        RangeCacheTestUtil.forEachCachedEntry(pulsar, entry -> {
            if (entry.hasExpectedReads()) {
                assertThat(entry.getReadCountHandler().getExpectedReadCount())
                        .describedAs("Expected read count for entry " + entry.getPosition() + " is not 1")
                        .isEqualTo(1);
                expectedReadCountWith1.increment();
            }
        });
        assertThat(expectedReadCountWith1.intValue()).isGreaterThan(0);

        lastConsumerThread.start();
        lastConsumerThread.join();

        // sleep for 3 * cache eviction time threshold to count for TTL
        // and managedLedgerCacheEvictionExtendTTLOfRecentlyAccessed behavior
        Thread.sleep(3 * conf.getManagedLedgerCacheEvictionTimeThresholdMillis());
        // now the cache should be empty
        assertThat(pulsar.getDefaultManagedLedgerFactory().getEntryCacheManager().getSize()).isEqualTo(0L);

        // Clean up consumers
        for (Consumer<Long> consumer : consumers) {
            consumer.close();
        }
    }

    // Test case for https://github.com/apache/pulsar/issues/16421
    @Test
    public void testConsumerFlowOnSharedSubscriptionIssue16421() throws Exception {
        String topic = BrokerTestUtil.newUniqueName("persistent://my-property/my-ns/topic");
        admin.topics().createNonPartitionedTopic(topic);
        String subName = "my-sub";
        int numMessages = 20_000;
        final CountDownLatch count = new CountDownLatch(numMessages);
        try (Consumer<byte[]> consumer = pulsarClient.newConsumer()
                .subscriptionType(SubscriptionType.Shared)
                .topic(topic)
                .subscriptionName(subName)
                .messageListener(new MessageListener<byte[]>() {
                    @Override
                    public void received(Consumer<byte[]> consumer, Message<byte[]> msg) {
                        //log.info("received {} - {}", msg, count.getCount());
                        consumer.acknowledgeAsync(msg);
                        count.countDown();
                    }
                })
                .subscribe();
             Producer<byte[]> producer = pulsarClient
                     .newProducer()
                     .blockIfQueueFull(true)
                     .enableBatching(true)
                     .topic(topic)
                     .create()) {
            consumer.pause();
            byte[] message = "foo".getBytes(StandardCharsets.UTF_8);
            // Send asynchronously in batches of 1000, waiting for each batch to avoid
            // unbounded pending-message buildup.
            List<CompletableFuture<?>> futures = new ArrayList<>();
            for (int i = 0; i < numMessages; i++) {
                futures.add(producer.sendAsync(message).whenComplete((id, e) -> {
                    if (e != null) {
                        log.error("error", e);
                    }
                }));
                if (futures.size() == 1000) {
                    FutureUtil.waitForAll(futures).get();
                    futures.clear();
                }
            }
            producer.flush();
            consumer.resume();
            assertTrue(count.await(20, TimeUnit.SECONDS));
        }
        // no BookKeeper reads should occur in this use case
        assertThat(bkReadCount.get()).isEqualTo(0);
    }
}
googleapis/google-cloud-java
34,939
java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListEnginesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/engine_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Request message for * [EngineService.ListEngines][google.cloud.discoveryengine.v1alpha.EngineService.ListEngines] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEnginesRequest} */ public final class ListEnginesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListEnginesRequest) ListEnginesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListEnginesRequest.newBuilder() to construct. 
private ListEnginesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEnginesRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEnginesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.class, com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Not supported. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest other = (com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [EngineService.ListEngines][google.cloud.discoveryengine.v1alpha.EngineService.ListEngines] * method. 
* </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListEnginesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListEnginesRequest) com.google.cloud.discoveryengine.v1alpha.ListEnginesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.class, com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.EngineServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_ListEnginesRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest build() { 
com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest buildPartial() { com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest result = new com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest) { return 
mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest other) { if (other == com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ 
= ""; /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, such as * `projects/{project}/locations/{location}/collections/{collection_id}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Not supported. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. Not supported. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Filter by solution type. For example: * solution_type=SOLUTION_TYPE_SEARCH * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListEnginesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListEnginesRequest) private static final com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest(); } public static com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEnginesRequest> PARSER = new com.google.protobuf.AbstractParser<ListEnginesRequest>() { @java.lang.Override public ListEnginesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEnginesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEnginesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ListEnginesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
35,011
java-iam/proto-google-iam-v2/src/main/java/com/google/iam/v2/ListPoliciesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/iam/v2/policy.proto // Protobuf Java Version: 3.25.8 package com.google.iam.v2; /** * * * <pre> * Response message for `ListPolicies`. * </pre> * * Protobuf type {@code google.iam.v2.ListPoliciesResponse} */ public final class ListPoliciesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.iam.v2.ListPoliciesResponse) ListPoliciesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPoliciesResponse.newBuilder() to construct. 
private ListPoliciesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPoliciesResponse() { policies_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPoliciesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v2.PolicyProto .internal_static_google_iam_v2_ListPoliciesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v2.PolicyProto .internal_static_google_iam_v2_ListPoliciesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v2.ListPoliciesResponse.class, com.google.iam.v2.ListPoliciesResponse.Builder.class); } public static final int POLICIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.iam.v2.Policy> policies_; /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ @java.lang.Override public java.util.List<com.google.iam.v2.Policy> getPoliciesList() { return policies_; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.iam.v2.PolicyOrBuilder> getPoliciesOrBuilderList() { return policies_; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ @java.lang.Override public int getPoliciesCount() { return policies_.size(); } /** * * * <pre> * Metadata for the policies that are attached to the resource. 
* </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ @java.lang.Override public com.google.iam.v2.Policy getPolicies(int index) { return policies_.get(index); } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ @java.lang.Override public com.google.iam.v2.PolicyOrBuilder getPoliciesOrBuilder(int index) { return policies_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policies_.size(); i++) { output.writeMessage(1, policies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policies_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.iam.v2.ListPoliciesResponse)) { return super.equals(obj); } com.google.iam.v2.ListPoliciesResponse other = (com.google.iam.v2.ListPoliciesResponse) obj; if (!getPoliciesList().equals(other.getPoliciesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPoliciesCount() > 0) { hash = (37 * hash) + POLICIES_FIELD_NUMBER; hash = (53 * hash) + getPoliciesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.iam.v2.ListPoliciesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v2.ListPoliciesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v2.ListPoliciesResponse parseFrom(java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v2.ListPoliciesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.iam.v2.ListPoliciesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v2.ListPoliciesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.iam.v2.ListPoliciesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for `ListPolicies`. * </pre> * * Protobuf type {@code google.iam.v2.ListPoliciesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.iam.v2.ListPoliciesResponse) com.google.iam.v2.ListPoliciesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v2.PolicyProto .internal_static_google_iam_v2_ListPoliciesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v2.PolicyProto .internal_static_google_iam_v2_ListPoliciesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v2.ListPoliciesResponse.class, com.google.iam.v2.ListPoliciesResponse.Builder.class); } // Construct using com.google.iam.v2.ListPoliciesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policiesBuilder_ == null) { policies_ = java.util.Collections.emptyList(); } else { policies_ = null; policiesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.iam.v2.PolicyProto .internal_static_google_iam_v2_ListPoliciesResponse_descriptor; } @java.lang.Override public com.google.iam.v2.ListPoliciesResponse getDefaultInstanceForType() { return com.google.iam.v2.ListPoliciesResponse.getDefaultInstance(); } 
@java.lang.Override public com.google.iam.v2.ListPoliciesResponse build() { com.google.iam.v2.ListPoliciesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.iam.v2.ListPoliciesResponse buildPartial() { com.google.iam.v2.ListPoliciesResponse result = new com.google.iam.v2.ListPoliciesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.iam.v2.ListPoliciesResponse result) { if (policiesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policies_ = java.util.Collections.unmodifiableList(policies_); bitField0_ = (bitField0_ & ~0x00000001); } result.policies_ = policies_; } else { result.policies_ = policiesBuilder_.build(); } } private void buildPartial0(com.google.iam.v2.ListPoliciesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.iam.v2.ListPoliciesResponse) { return mergeFrom((com.google.iam.v2.ListPoliciesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.iam.v2.ListPoliciesResponse other) { if (other == com.google.iam.v2.ListPoliciesResponse.getDefaultInstance()) return this; if (policiesBuilder_ == null) { if (!other.policies_.isEmpty()) { if (policies_.isEmpty()) { policies_ = other.policies_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePoliciesIsMutable(); policies_.addAll(other.policies_); } onChanged(); } } else { if (!other.policies_.isEmpty()) { if (policiesBuilder_.isEmpty()) { policiesBuilder_.dispose(); policiesBuilder_ = null; policies_ = other.policies_; bitField0_ = (bitField0_ & ~0x00000001); policiesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPoliciesFieldBuilder() : null; } else { policiesBuilder_.addAllMessages(other.policies_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.iam.v2.Policy m = input.readMessage(com.google.iam.v2.Policy.parser(), extensionRegistry); if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(m); } else { policiesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ 
|= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.iam.v2.Policy> policies_ = java.util.Collections.emptyList(); private void ensurePoliciesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policies_ = new java.util.ArrayList<com.google.iam.v2.Policy>(policies_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v2.Policy, com.google.iam.v2.Policy.Builder, com.google.iam.v2.PolicyOrBuilder> policiesBuilder_; /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public java.util.List<com.google.iam.v2.Policy> getPoliciesList() { if (policiesBuilder_ == null) { return java.util.Collections.unmodifiableList(policies_); } else { return policiesBuilder_.getMessageList(); } } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public int getPoliciesCount() { if (policiesBuilder_ == null) { return policies_.size(); } else { return policiesBuilder_.getCount(); } } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public com.google.iam.v2.Policy getPolicies(int index) { if (policiesBuilder_ == null) { return policies_.get(index); } else { return policiesBuilder_.getMessage(index); } } /** * * * <pre> * Metadata for the policies that are attached to the resource. 
* </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder setPolicies(int index, com.google.iam.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.set(index, value); onChanged(); } else { policiesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder setPolicies(int index, com.google.iam.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.set(index, builderForValue.build()); onChanged(); } else { policiesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder addPolicies(com.google.iam.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.add(value); onChanged(); } else { policiesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder addPolicies(int index, com.google.iam.v2.Policy value) { if (policiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePoliciesIsMutable(); policies_.add(index, value); onChanged(); } else { policiesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. 
* </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder addPolicies(com.google.iam.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(builderForValue.build()); onChanged(); } else { policiesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder addPolicies(int index, com.google.iam.v2.Policy.Builder builderForValue) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.add(index, builderForValue.build()); onChanged(); } else { policiesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder addAllPolicies(java.lang.Iterable<? extends com.google.iam.v2.Policy> values) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policies_); onChanged(); } else { policiesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder clearPolicies() { if (policiesBuilder_ == null) { policies_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policiesBuilder_.clear(); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. 
* </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public Builder removePolicies(int index) { if (policiesBuilder_ == null) { ensurePoliciesIsMutable(); policies_.remove(index); onChanged(); } else { policiesBuilder_.remove(index); } return this; } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public com.google.iam.v2.Policy.Builder getPoliciesBuilder(int index) { return getPoliciesFieldBuilder().getBuilder(index); } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public com.google.iam.v2.PolicyOrBuilder getPoliciesOrBuilder(int index) { if (policiesBuilder_ == null) { return policies_.get(index); } else { return policiesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public java.util.List<? extends com.google.iam.v2.PolicyOrBuilder> getPoliciesOrBuilderList() { if (policiesBuilder_ != null) { return policiesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policies_); } } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public com.google.iam.v2.Policy.Builder addPoliciesBuilder() { return getPoliciesFieldBuilder().addBuilder(com.google.iam.v2.Policy.getDefaultInstance()); } /** * * * <pre> * Metadata for the policies that are attached to the resource. 
* </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public com.google.iam.v2.Policy.Builder addPoliciesBuilder(int index) { return getPoliciesFieldBuilder() .addBuilder(index, com.google.iam.v2.Policy.getDefaultInstance()); } /** * * * <pre> * Metadata for the policies that are attached to the resource. * </pre> * * <code>repeated .google.iam.v2.Policy policies = 1;</code> */ public java.util.List<com.google.iam.v2.Policy.Builder> getPoliciesBuilderList() { return getPoliciesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v2.Policy, com.google.iam.v2.Policy.Builder, com.google.iam.v2.PolicyOrBuilder> getPoliciesFieldBuilder() { if (policiesBuilder_ == null) { policiesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v2.Policy, com.google.iam.v2.Policy.Builder, com.google.iam.v2.PolicyOrBuilder>( policies_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policies_ = null; } return policiesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A page token that you can use in a [ListPoliciesRequest][google.iam.v2.ListPoliciesRequest] to retrieve the * next page. If this field is omitted, there are no additional pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.iam.v2.ListPoliciesResponse) } // @@protoc_insertion_point(class_scope:google.iam.v2.ListPoliciesResponse) private static final com.google.iam.v2.ListPoliciesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.iam.v2.ListPoliciesResponse(); } public static com.google.iam.v2.ListPoliciesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPoliciesResponse> PARSER = new com.google.protobuf.AbstractParser<ListPoliciesResponse>() { @java.lang.Override public ListPoliciesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPoliciesResponse> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<ListPoliciesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.iam.v2.ListPoliciesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
35,053
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMFullResourcePlan.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class WMFullResourcePlan implements org.apache.thrift.TBase<WMFullResourcePlan, WMFullResourcePlan._Fields>, java.io.Serializable, Cloneable, Comparable<WMFullResourcePlan> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WMFullResourcePlan"); private static final org.apache.thrift.protocol.TField PLAN_FIELD_DESC = new org.apache.thrift.protocol.TField("plan", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField POOLS_FIELD_DESC = new org.apache.thrift.protocol.TField("pools", org.apache.thrift.protocol.TType.LIST, (short)2); private static final org.apache.thrift.protocol.TField MAPPINGS_FIELD_DESC = new org.apache.thrift.protocol.TField("mappings", org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField TRIGGERS_FIELD_DESC = new org.apache.thrift.protocol.TField("triggers", org.apache.thrift.protocol.TType.LIST, (short)4); private static final org.apache.thrift.protocol.TField POOL_TRIGGERS_FIELD_DESC = new org.apache.thrift.protocol.TField("poolTriggers", org.apache.thrift.protocol.TType.LIST, (short)5); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new WMFullResourcePlanStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new WMFullResourcePlanTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable WMResourcePlan plan; // required private 
@org.apache.thrift.annotation.Nullable java.util.List<WMPool> pools; // required private @org.apache.thrift.annotation.Nullable java.util.List<WMMapping> mappings; // optional private @org.apache.thrift.annotation.Nullable java.util.List<WMTrigger> triggers; // optional private @org.apache.thrift.annotation.Nullable java.util.List<WMPoolTrigger> poolTriggers; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { PLAN((short)1, "plan"), POOLS((short)2, "pools"), MAPPINGS((short)3, "mappings"), TRIGGERS((short)4, "triggers"), POOL_TRIGGERS((short)5, "poolTriggers"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // PLAN return PLAN; case 2: // POOLS return POOLS; case 3: // MAPPINGS return MAPPINGS; case 4: // TRIGGERS return TRIGGERS; case 5: // POOL_TRIGGERS return POOL_TRIGGERS; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final _Fields optionals[] = {_Fields.MAPPINGS,_Fields.TRIGGERS,_Fields.POOL_TRIGGERS}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.PLAN, new org.apache.thrift.meta_data.FieldMetaData("plan", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMResourcePlan.class))); tmpMap.put(_Fields.POOLS, new org.apache.thrift.meta_data.FieldMetaData("pools", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMPool.class)))); tmpMap.put(_Fields.MAPPINGS, new org.apache.thrift.meta_data.FieldMetaData("mappings", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMMapping.class)))); tmpMap.put(_Fields.TRIGGERS, new org.apache.thrift.meta_data.FieldMetaData("triggers", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new 
org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMTrigger.class)))); tmpMap.put(_Fields.POOL_TRIGGERS, new org.apache.thrift.meta_data.FieldMetaData("poolTriggers", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMPoolTrigger.class)))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WMFullResourcePlan.class, metaDataMap); } public WMFullResourcePlan() { } public WMFullResourcePlan( WMResourcePlan plan, java.util.List<WMPool> pools) { this(); this.plan = plan; this.pools = pools; } /** * Performs a deep copy on <i>other</i>. */ public WMFullResourcePlan(WMFullResourcePlan other) { if (other.isSetPlan()) { this.plan = new WMResourcePlan(other.plan); } if (other.isSetPools()) { java.util.List<WMPool> __this__pools = new java.util.ArrayList<WMPool>(other.pools.size()); for (WMPool other_element : other.pools) { __this__pools.add(new WMPool(other_element)); } this.pools = __this__pools; } if (other.isSetMappings()) { java.util.List<WMMapping> __this__mappings = new java.util.ArrayList<WMMapping>(other.mappings.size()); for (WMMapping other_element : other.mappings) { __this__mappings.add(new WMMapping(other_element)); } this.mappings = __this__mappings; } if (other.isSetTriggers()) { java.util.List<WMTrigger> __this__triggers = new java.util.ArrayList<WMTrigger>(other.triggers.size()); for (WMTrigger other_element : other.triggers) { __this__triggers.add(new WMTrigger(other_element)); } this.triggers = __this__triggers; } if (other.isSetPoolTriggers()) { java.util.List<WMPoolTrigger> __this__poolTriggers = new java.util.ArrayList<WMPoolTrigger>(other.poolTriggers.size()); for (WMPoolTrigger other_element : other.poolTriggers) { __this__poolTriggers.add(new WMPoolTrigger(other_element)); 
} this.poolTriggers = __this__poolTriggers; } } public WMFullResourcePlan deepCopy() { return new WMFullResourcePlan(this); } @Override public void clear() { this.plan = null; this.pools = null; this.mappings = null; this.triggers = null; this.poolTriggers = null; } @org.apache.thrift.annotation.Nullable public WMResourcePlan getPlan() { return this.plan; } public void setPlan(@org.apache.thrift.annotation.Nullable WMResourcePlan plan) { this.plan = plan; } public void unsetPlan() { this.plan = null; } /** Returns true if field plan is set (has been assigned a value) and false otherwise */ public boolean isSetPlan() { return this.plan != null; } public void setPlanIsSet(boolean value) { if (!value) { this.plan = null; } } public int getPoolsSize() { return (this.pools == null) ? 0 : this.pools.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<WMPool> getPoolsIterator() { return (this.pools == null) ? null : this.pools.iterator(); } public void addToPools(WMPool elem) { if (this.pools == null) { this.pools = new java.util.ArrayList<WMPool>(); } this.pools.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<WMPool> getPools() { return this.pools; } public void setPools(@org.apache.thrift.annotation.Nullable java.util.List<WMPool> pools) { this.pools = pools; } public void unsetPools() { this.pools = null; } /** Returns true if field pools is set (has been assigned a value) and false otherwise */ public boolean isSetPools() { return this.pools != null; } public void setPoolsIsSet(boolean value) { if (!value) { this.pools = null; } } public int getMappingsSize() { return (this.mappings == null) ? 0 : this.mappings.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<WMMapping> getMappingsIterator() { return (this.mappings == null) ? 
null : this.mappings.iterator(); } public void addToMappings(WMMapping elem) { if (this.mappings == null) { this.mappings = new java.util.ArrayList<WMMapping>(); } this.mappings.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<WMMapping> getMappings() { return this.mappings; } public void setMappings(@org.apache.thrift.annotation.Nullable java.util.List<WMMapping> mappings) { this.mappings = mappings; } public void unsetMappings() { this.mappings = null; } /** Returns true if field mappings is set (has been assigned a value) and false otherwise */ public boolean isSetMappings() { return this.mappings != null; } public void setMappingsIsSet(boolean value) { if (!value) { this.mappings = null; } } public int getTriggersSize() { return (this.triggers == null) ? 0 : this.triggers.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<WMTrigger> getTriggersIterator() { return (this.triggers == null) ? null : this.triggers.iterator(); } public void addToTriggers(WMTrigger elem) { if (this.triggers == null) { this.triggers = new java.util.ArrayList<WMTrigger>(); } this.triggers.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<WMTrigger> getTriggers() { return this.triggers; } public void setTriggers(@org.apache.thrift.annotation.Nullable java.util.List<WMTrigger> triggers) { this.triggers = triggers; } public void unsetTriggers() { this.triggers = null; } /** Returns true if field triggers is set (has been assigned a value) and false otherwise */ public boolean isSetTriggers() { return this.triggers != null; } public void setTriggersIsSet(boolean value) { if (!value) { this.triggers = null; } } public int getPoolTriggersSize() { return (this.poolTriggers == null) ? 0 : this.poolTriggers.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<WMPoolTrigger> getPoolTriggersIterator() { return (this.poolTriggers == null) ? 
null : this.poolTriggers.iterator(); } public void addToPoolTriggers(WMPoolTrigger elem) { if (this.poolTriggers == null) { this.poolTriggers = new java.util.ArrayList<WMPoolTrigger>(); } this.poolTriggers.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<WMPoolTrigger> getPoolTriggers() { return this.poolTriggers; } public void setPoolTriggers(@org.apache.thrift.annotation.Nullable java.util.List<WMPoolTrigger> poolTriggers) { this.poolTriggers = poolTriggers; } public void unsetPoolTriggers() { this.poolTriggers = null; } /** Returns true if field poolTriggers is set (has been assigned a value) and false otherwise */ public boolean isSetPoolTriggers() { return this.poolTriggers != null; } public void setPoolTriggersIsSet(boolean value) { if (!value) { this.poolTriggers = null; } } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case PLAN: if (value == null) { unsetPlan(); } else { setPlan((WMResourcePlan)value); } break; case POOLS: if (value == null) { unsetPools(); } else { setPools((java.util.List<WMPool>)value); } break; case MAPPINGS: if (value == null) { unsetMappings(); } else { setMappings((java.util.List<WMMapping>)value); } break; case TRIGGERS: if (value == null) { unsetTriggers(); } else { setTriggers((java.util.List<WMTrigger>)value); } break; case POOL_TRIGGERS: if (value == null) { unsetPoolTriggers(); } else { setPoolTriggers((java.util.List<WMPoolTrigger>)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case PLAN: return getPlan(); case POOLS: return getPools(); case MAPPINGS: return getMappings(); case TRIGGERS: return getTriggers(); case POOL_TRIGGERS: return getPoolTriggers(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields 
field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case PLAN: return isSetPlan(); case POOLS: return isSetPools(); case MAPPINGS: return isSetMappings(); case TRIGGERS: return isSetTriggers(); case POOL_TRIGGERS: return isSetPoolTriggers(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof WMFullResourcePlan) return this.equals((WMFullResourcePlan)that); return false; } public boolean equals(WMFullResourcePlan that) { if (that == null) return false; if (this == that) return true; boolean this_present_plan = true && this.isSetPlan(); boolean that_present_plan = true && that.isSetPlan(); if (this_present_plan || that_present_plan) { if (!(this_present_plan && that_present_plan)) return false; if (!this.plan.equals(that.plan)) return false; } boolean this_present_pools = true && this.isSetPools(); boolean that_present_pools = true && that.isSetPools(); if (this_present_pools || that_present_pools) { if (!(this_present_pools && that_present_pools)) return false; if (!this.pools.equals(that.pools)) return false; } boolean this_present_mappings = true && this.isSetMappings(); boolean that_present_mappings = true && that.isSetMappings(); if (this_present_mappings || that_present_mappings) { if (!(this_present_mappings && that_present_mappings)) return false; if (!this.mappings.equals(that.mappings)) return false; } boolean this_present_triggers = true && this.isSetTriggers(); boolean that_present_triggers = true && that.isSetTriggers(); if (this_present_triggers || that_present_triggers) { if (!(this_present_triggers && that_present_triggers)) return false; if (!this.triggers.equals(that.triggers)) return false; } boolean this_present_poolTriggers = true && this.isSetPoolTriggers(); boolean that_present_poolTriggers = true && that.isSetPoolTriggers(); if (this_present_poolTriggers || that_present_poolTriggers) { if (!(this_present_poolTriggers && 
that_present_poolTriggers)) return false; if (!this.poolTriggers.equals(that.poolTriggers)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetPlan()) ? 131071 : 524287); if (isSetPlan()) hashCode = hashCode * 8191 + plan.hashCode(); hashCode = hashCode * 8191 + ((isSetPools()) ? 131071 : 524287); if (isSetPools()) hashCode = hashCode * 8191 + pools.hashCode(); hashCode = hashCode * 8191 + ((isSetMappings()) ? 131071 : 524287); if (isSetMappings()) hashCode = hashCode * 8191 + mappings.hashCode(); hashCode = hashCode * 8191 + ((isSetTriggers()) ? 131071 : 524287); if (isSetTriggers()) hashCode = hashCode * 8191 + triggers.hashCode(); hashCode = hashCode * 8191 + ((isSetPoolTriggers()) ? 131071 : 524287); if (isSetPoolTriggers()) hashCode = hashCode * 8191 + poolTriggers.hashCode(); return hashCode; } @Override public int compareTo(WMFullResourcePlan other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetPlan(), other.isSetPlan()); if (lastComparison != 0) { return lastComparison; } if (isSetPlan()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.plan, other.plan); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetPools(), other.isSetPools()); if (lastComparison != 0) { return lastComparison; } if (isSetPools()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pools, other.pools); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetMappings(), other.isSetMappings()); if (lastComparison != 0) { return lastComparison; } if (isSetMappings()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mappings, other.mappings); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
java.lang.Boolean.compare(isSetTriggers(), other.isSetTriggers()); if (lastComparison != 0) { return lastComparison; } if (isSetTriggers()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.triggers, other.triggers); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetPoolTriggers(), other.isSetPoolTriggers()); if (lastComparison != 0) { return lastComparison; } if (isSetPoolTriggers()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.poolTriggers, other.poolTriggers); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("WMFullResourcePlan("); boolean first = true; sb.append("plan:"); if (this.plan == null) { sb.append("null"); } else { sb.append(this.plan); } first = false; if (!first) sb.append(", "); sb.append("pools:"); if (this.pools == null) { sb.append("null"); } else { sb.append(this.pools); } first = false; if (isSetMappings()) { if (!first) sb.append(", "); sb.append("mappings:"); if (this.mappings == null) { sb.append("null"); } else { sb.append(this.mappings); } first = false; } if (isSetTriggers()) { if (!first) sb.append(", "); sb.append("triggers:"); if (this.triggers == null) { sb.append("null"); } else { sb.append(this.triggers); } first = false; } if (isSetPoolTriggers()) { if (!first) sb.append(", "); sb.append("poolTriggers:"); if (this.poolTriggers == null) { sb.append("null"); } else { sb.append(this.poolTriggers); } first = false; } sb.append(")"); return 
sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetPlan()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'plan' is unset! Struct:" + toString()); } if (!isSetPools()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'pools' is unset! Struct:" + toString()); } // check for sub-struct validity if (plan != null) { plan.validate(); } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class WMFullResourcePlanStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public WMFullResourcePlanStandardScheme getScheme() { return new WMFullResourcePlanStandardScheme(); } } private static class WMFullResourcePlanStandardScheme extends org.apache.thrift.scheme.StandardScheme<WMFullResourcePlan> { public void read(org.apache.thrift.protocol.TProtocol iprot, WMFullResourcePlan struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // PLAN if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.plan = new WMResourcePlan(); struct.plan.read(iprot); struct.setPlanIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type); } break; case 2: // POOLS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list1286 = iprot.readListBegin(); struct.pools = new java.util.ArrayList<WMPool>(_list1286.size); @org.apache.thrift.annotation.Nullable WMPool _elem1287; for (int _i1288 = 0; _i1288 < _list1286.size; ++_i1288) { _elem1287 = new WMPool(); _elem1287.read(iprot); struct.pools.add(_elem1287); } iprot.readListEnd(); } struct.setPoolsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // MAPPINGS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list1289 = iprot.readListBegin(); struct.mappings = new java.util.ArrayList<WMMapping>(_list1289.size); @org.apache.thrift.annotation.Nullable WMMapping _elem1290; for (int _i1291 = 0; _i1291 < _list1289.size; ++_i1291) { _elem1290 = new WMMapping(); _elem1290.read(iprot); struct.mappings.add(_elem1290); } iprot.readListEnd(); } struct.setMappingsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // TRIGGERS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list1292 = iprot.readListBegin(); struct.triggers = new java.util.ArrayList<WMTrigger>(_list1292.size); @org.apache.thrift.annotation.Nullable WMTrigger _elem1293; for (int _i1294 = 0; _i1294 < _list1292.size; ++_i1294) { _elem1293 = new WMTrigger(); _elem1293.read(iprot); struct.triggers.add(_elem1293); } iprot.readListEnd(); } struct.setTriggersIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // POOL_TRIGGERS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list1295 = iprot.readListBegin(); struct.poolTriggers = new java.util.ArrayList<WMPoolTrigger>(_list1295.size); 
@org.apache.thrift.annotation.Nullable WMPoolTrigger _elem1296; for (int _i1297 = 0; _i1297 < _list1295.size; ++_i1297) { _elem1296 = new WMPoolTrigger(); _elem1296.read(iprot); struct.poolTriggers.add(_elem1296); } iprot.readListEnd(); } struct.setPoolTriggersIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, WMFullResourcePlan struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.plan != null) { oprot.writeFieldBegin(PLAN_FIELD_DESC); struct.plan.write(oprot); oprot.writeFieldEnd(); } if (struct.pools != null) { oprot.writeFieldBegin(POOLS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.pools.size())); for (WMPool _iter1298 : struct.pools) { _iter1298.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.mappings != null) { if (struct.isSetMappings()) { oprot.writeFieldBegin(MAPPINGS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.mappings.size())); for (WMMapping _iter1299 : struct.mappings) { _iter1299.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.triggers != null) { if (struct.isSetTriggers()) { oprot.writeFieldBegin(TRIGGERS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.triggers.size())); for (WMTrigger _iter1300 : struct.triggers) { _iter1300.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.poolTriggers != null) { if (struct.isSetPoolTriggers()) { oprot.writeFieldBegin(POOL_TRIGGERS_FIELD_DESC); { oprot.writeListBegin(new 
org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.poolTriggers.size())); for (WMPoolTrigger _iter1301 : struct.poolTriggers) { _iter1301.write(oprot); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class WMFullResourcePlanTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public WMFullResourcePlanTupleScheme getScheme() { return new WMFullResourcePlanTupleScheme(); } } private static class WMFullResourcePlanTupleScheme extends org.apache.thrift.scheme.TupleScheme<WMFullResourcePlan> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, WMFullResourcePlan struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.plan.write(oprot); { oprot.writeI32(struct.pools.size()); for (WMPool _iter1302 : struct.pools) { _iter1302.write(oprot); } } java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetMappings()) { optionals.set(0); } if (struct.isSetTriggers()) { optionals.set(1); } if (struct.isSetPoolTriggers()) { optionals.set(2); } oprot.writeBitSet(optionals, 3); if (struct.isSetMappings()) { { oprot.writeI32(struct.mappings.size()); for (WMMapping _iter1303 : struct.mappings) { _iter1303.write(oprot); } } } if (struct.isSetTriggers()) { { oprot.writeI32(struct.triggers.size()); for (WMTrigger _iter1304 : struct.triggers) { _iter1304.write(oprot); } } } if (struct.isSetPoolTriggers()) { { oprot.writeI32(struct.poolTriggers.size()); for (WMPoolTrigger _iter1305 : struct.poolTriggers) { _iter1305.write(oprot); } } } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, WMFullResourcePlan struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.plan = new WMResourcePlan(); struct.plan.read(iprot); 
struct.setPlanIsSet(true); { org.apache.thrift.protocol.TList _list1306 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); struct.pools = new java.util.ArrayList<WMPool>(_list1306.size); @org.apache.thrift.annotation.Nullable WMPool _elem1307; for (int _i1308 = 0; _i1308 < _list1306.size; ++_i1308) { _elem1307 = new WMPool(); _elem1307.read(iprot); struct.pools.add(_elem1307); } } struct.setPoolsIsSet(true); java.util.BitSet incoming = iprot.readBitSet(3); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list1309 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); struct.mappings = new java.util.ArrayList<WMMapping>(_list1309.size); @org.apache.thrift.annotation.Nullable WMMapping _elem1310; for (int _i1311 = 0; _i1311 < _list1309.size; ++_i1311) { _elem1310 = new WMMapping(); _elem1310.read(iprot); struct.mappings.add(_elem1310); } } struct.setMappingsIsSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list1312 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); struct.triggers = new java.util.ArrayList<WMTrigger>(_list1312.size); @org.apache.thrift.annotation.Nullable WMTrigger _elem1313; for (int _i1314 = 0; _i1314 < _list1312.size; ++_i1314) { _elem1313 = new WMTrigger(); _elem1313.read(iprot); struct.triggers.add(_elem1313); } } struct.setTriggersIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TList _list1315 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT); struct.poolTriggers = new java.util.ArrayList<WMPoolTrigger>(_list1315.size); @org.apache.thrift.annotation.Nullable WMPoolTrigger _elem1316; for (int _i1317 = 0; _i1317 < _list1315.size; ++_i1317) { _elem1316 = new WMPoolTrigger(); _elem1316.read(iprot); struct.poolTriggers.add(_elem1316); } } struct.setPoolTriggersIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return 
(org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
apache/maven-plugins
34,914
maven-assembly-plugin/src/test/java/org/apache/maven/plugins/assembly/io/DefaultAssemblyReaderTest.java
package org.apache.maven.plugins.assembly.io; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import junit.framework.TestCase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Model; import org.apache.maven.plugins.assembly.AssemblerConfigurationSource; import org.apache.maven.plugins.assembly.InvalidAssemblerConfigurationException; import org.apache.maven.plugins.assembly.archive.DefaultAssemblyArchiverTest; import org.apache.maven.plugins.assembly.interpolation.AssemblyInterpolator; import org.apache.maven.plugins.assembly.model.Assembly; import org.apache.maven.plugins.assembly.model.Component; import org.apache.maven.plugins.assembly.model.ContainerDescriptorHandlerConfig; import org.apache.maven.plugins.assembly.model.DependencySet; import org.apache.maven.plugins.assembly.model.FileItem; import org.apache.maven.plugins.assembly.model.FileSet; import org.apache.maven.plugins.assembly.model.Repository; import org.apache.maven.plugins.assembly.model.io.xpp3.AssemblyXpp3Writer; import org.apache.maven.plugins.assembly.model.io.xpp3.ComponentXpp3Reader; import org.apache.maven.plugins.assembly.model.io.xpp3.ComponentXpp3Writer; import 
org.apache.maven.plugins.assembly.testutils.TestFileManager; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.interpolation.fixed.FixedStringSearchInterpolator; import org.codehaus.plexus.interpolation.fixed.InterpolationState; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.codehaus.plexus.util.IOUtil; import org.easymock.classextension.EasyMockSupport; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import static org.easymock.EasyMock.expect; public class DefaultAssemblyReaderTest extends TestCase { private TestFileManager fileManager; private EasyMockSupport mockManager; private AssemblerConfigurationSource configSource; public static StringReader writeToStringReader( Assembly assembly ) throws IOException { final StringWriter sw = new StringWriter(); final AssemblyXpp3Writer assemblyWriter = new AssemblyXpp3Writer(); assemblyWriter.write( sw, assembly ); return new StringReader( sw.toString() ); } @Override public void setUp() { fileManager = new TestFileManager( "assembly-reader.test.", ".xml" ); mockManager = new EasyMockSupport(); configSource = mockManager.createMock( AssemblerConfigurationSource.class ); ArtifactRepository localRepo = mockManager.createMock( ArtifactRepository.class ); expect( localRepo.getBasedir() ).andReturn( "/path/to/local/repo" ).anyTimes(); expect( configSource.getLocalRepository() ).andReturn( localRepo ).anyTimes(); expect( configSource.getRemoteRepositories() ).andReturn( Collections.<ArtifactRepository>emptyList() ).anyTimes(); expect( configSource.getMavenSession() ).andReturn( null ).anyTimes(); } @Override public void tearDown() throws IOException { fileManager.cleanUp(); } public void 
testIncludeSiteInAssembly_ShouldFailIfSiteDirectoryNonExistent() throws IOException { final File siteDir = File.createTempFile( "assembly-reader.", ".test" ); siteDir.delete(); expect( configSource.getSiteDirectory() ).andReturn( siteDir ).anyTimes(); final Assembly assembly = new Assembly(); mockManager.replayAll(); try { new DefaultAssemblyReader().includeSiteInAssembly( assembly, configSource ); fail( "Should fail when site directory is non-existent." ); } catch ( final InvalidAssemblerConfigurationException e ) { // this should happen. } mockManager.verifyAll(); } // public void testReadComponent_ShouldReadComponentFromXml() // throws IOException, AssemblyReadException // { // Component component = new Component(); // // FileSet fileSet = new FileSet(); // fileSet.setDirectory( "/dir" ); // // component.addFileSet( fileSet ); // // StringWriter sw = new StringWriter(); // // ComponentXpp3Writer componentWriter = new ComponentXpp3Writer(); // // componentWriter.write( sw, component ); // // Component result = new DefaultAssemblyReader().readComponent( new StringReader( sw.toString() ) ); // // List<FileSet> fileSets = result.getFileSets(); // // assertNotNull( fileSets ); // assertEquals( 1, fileSets.size() ); // // FileSet fs = (FileSet) fileSets.get( 0 ); // // assertEquals( "/dir", fs.getDirectory() ); // } // // public void testGetComponentFromFile_ShouldReadComponent() // throws IOException, AssemblyReadException // { // Component component = new Component(); // // FileSet fileSet = new FileSet(); // fileSet.setDirectory( "/dir" ); // // component.addFileSet( fileSet ); // // File componentFile = fileManager.createTempFile(); // // FileWriter writer = null; // // try // { // writer = new FileWriter( componentFile ); // // ComponentXpp3Writer componentWriter = new ComponentXpp3Writer(); // // componentWriter.write( writer, component ); // } // finally // { // IOUtil.close( writer ); // } // // File basedir = componentFile.getParentFile(); // String filename 
= componentFile.getName(); // // configSource.getBasedir(); // configSourceControl.setReturnValue( basedir ); // // mockManager.replayAll(); // // Component result = new DefaultAssemblyReader().getComponentFromFile( filename, configSource ); // // List<FileSet> fileSets = result.getFileSets(); // // assertNotNull( fileSets ); // assertEquals( 1, fileSets.size() ); // // FileSet fs = (FileSet) fileSets.get( 0 ); // // assertEquals( "/dir", fs.getDirectory() ); // // mockManager.verifyAll(); // } public void testIncludeSiteInAssembly_ShouldAddSiteDirFileSetWhenDirExists() throws IOException, InvalidAssemblerConfigurationException { final File siteDir = fileManager.createTempDir(); expect( configSource.getSiteDirectory() ).andReturn( siteDir ).anyTimes(); final Assembly assembly = new Assembly(); mockManager.replayAll(); new DefaultAssemblyReader().includeSiteInAssembly( assembly, configSource ); final List<FileSet> fileSets = assembly.getFileSets(); assertNotNull( fileSets ); assertEquals( 1, fileSets.size() ); final FileSet fs = fileSets.get( 0 ); assertEquals( siteDir.getPath(), fs.getDirectory() ); mockManager.verifyAll(); } public void testMergeComponentWithAssembly_ShouldAddOneFileSetToExistingListOfTwo() { final Assembly assembly = new Assembly(); FileSet fs = new FileSet(); fs.setDirectory( "/dir" ); assembly.addFileSet( fs ); fs = new FileSet(); fs.setDirectory( "/other-dir" ); assembly.addFileSet( fs ); fs = new FileSet(); fs.setDirectory( "/third-dir" ); final Component component = new Component(); component.addFileSet( fs ); new DefaultAssemblyReader().mergeComponentWithAssembly( component, assembly ); final List<FileSet> fileSets = assembly.getFileSets(); assertNotNull( fileSets ); assertEquals( 3, fileSets.size() ); final FileSet rfs1 = fileSets.get( 0 ); assertEquals( "/dir", rfs1.getDirectory() ); final FileSet rfs2 = fileSets.get( 1 ); assertEquals( "/other-dir", rfs2.getDirectory() ); final FileSet rfs3 = fileSets.get( 2 ); assertEquals( 
"/third-dir", rfs3.getDirectory() ); } public void testMergeComponentWithAssembly_ShouldAddOneFileItemToExistingListOfTwo() { final Assembly assembly = new Assembly(); FileItem fi = new FileItem(); fi.setSource( "file" ); assembly.addFile( fi ); fi = new FileItem(); fi.setSource( "file2" ); assembly.addFile( fi ); fi = new FileItem(); fi.setSource( "file3" ); final Component component = new Component(); component.addFile( fi ); new DefaultAssemblyReader().mergeComponentWithAssembly( component, assembly ); final List<FileItem> fileItems = assembly.getFiles(); assertNotNull( fileItems ); assertEquals( 3, fileItems.size() ); final FileItem rf1 = fileItems.get( 0 ); assertEquals( "file", rf1.getSource() ); final FileItem rf2 = fileItems.get( 1 ); assertEquals( "file2", rf2.getSource() ); final FileItem rf3 = fileItems.get( 2 ); assertEquals( "file3", rf3.getSource() ); } public void testMergeComponentWithAssembly_ShouldAddOneDependencySetToExistingListOfTwo() { final Assembly assembly = new Assembly(); DependencySet ds = new DependencySet(); ds.setScope( Artifact.SCOPE_RUNTIME ); assembly.addDependencySet( ds ); ds = new DependencySet(); ds.setScope( Artifact.SCOPE_COMPILE ); assembly.addDependencySet( ds ); final Component component = new Component(); ds = new DependencySet(); ds.setScope( Artifact.SCOPE_SYSTEM ); component.addDependencySet( ds ); new DefaultAssemblyReader().mergeComponentWithAssembly( component, assembly ); final List<DependencySet> depSets = assembly.getDependencySets(); assertNotNull( depSets ); assertEquals( 3, depSets.size() ); assertEquals( Artifact.SCOPE_RUNTIME, depSets.get( 0 ).getScope() ); assertEquals( Artifact.SCOPE_COMPILE, depSets.get( 1 ).getScope() ); assertEquals( Artifact.SCOPE_SYSTEM, depSets.get( 2 ).getScope() ); } public void testMergeComponentWithAssembly_ShouldAddOneRepositoryToExistingListOfTwo() { final Assembly assembly = new Assembly(); Repository repo = new Repository(); repo.setScope( Artifact.SCOPE_RUNTIME ); 
assembly.addRepository( repo ); repo = new Repository(); repo.setScope( Artifact.SCOPE_COMPILE ); assembly.addRepository( repo ); final Component component = new Component(); repo = new Repository(); repo.setScope( Artifact.SCOPE_SYSTEM ); component.addRepository( repo ); new DefaultAssemblyReader().mergeComponentWithAssembly( component, assembly ); final List<Repository> depSets = assembly.getRepositories(); assertNotNull( depSets ); assertEquals( 3, depSets.size() ); assertEquals( Artifact.SCOPE_RUNTIME, depSets.get( 0 ).getScope() ); assertEquals( Artifact.SCOPE_COMPILE, depSets.get( 1 ).getScope() ); assertEquals( Artifact.SCOPE_SYSTEM, depSets.get( 2 ).getScope() ); } // FIXME: Deep merging should take place... // public void // testMergeComponentWithAssembly_ShouldMergeOneFileSetToOneOfExistingTwo() // { // Assembly assembly = new Assembly(); // // FileSet fs = new FileSet(); // fs.setDirectory( "/dir" ); // fs.addInclude( "**/test.txt" ); // // assembly.addFileSet( fs ); // // fs = new FileSet(); // fs.setDirectory( "/other-dir" ); // assembly.addFileSet( fs ); // // fs = new FileSet(); // fs.setDirectory( "/dir" ); // fs.addInclude( "**/components.txt" ); // // Component component = new Component(); // // component.addFileSet( fs ); // // new DefaultAssemblyReader().mergeComponentWithAssembly( component, // assembly ); // // List<FileSet> fileSets = assembly.getFileSets(); // // assertNotNull( fileSets ); // assertEquals( 2, fileSets.size() ); // // FileSet rfs1 = (FileSet) fileSets.get( 0 ); // assertEquals( "/dir", rfs1.getDirectory() ); // // List includes = rfs1.getIncludes(); // // assertNotNull( includes ); // assertEquals( 2, includes.size() ); // assertTrue( includes.contains( "**/test.txt" ) ); // assertTrue( includes.contains( "**/components.txt" ) ); // // FileSet rfs2 = (FileSet) fileSets.get( 1 ); // assertEquals( "/other-dir", rfs2.getDirectory() ); // // } public void 
testMergeComponentWithAssembly_ShouldAddOneContainerDescriptorHandlerToExistingListOfTwo() { final Assembly assembly = new Assembly(); ContainerDescriptorHandlerConfig cfg = new ContainerDescriptorHandlerConfig(); cfg.setHandlerName( "one" ); assembly.addContainerDescriptorHandler( cfg ); cfg = new ContainerDescriptorHandlerConfig(); cfg.setHandlerName( "two" ); assembly.addContainerDescriptorHandler( cfg ); final Component component = new Component(); cfg = new ContainerDescriptorHandlerConfig(); cfg.setHandlerName( "three" ); component.addContainerDescriptorHandler( cfg ); new DefaultAssemblyReader().mergeComponentWithAssembly( component, assembly ); final List<ContainerDescriptorHandlerConfig> result = assembly.getContainerDescriptorHandlers(); assertNotNull( result ); assertEquals( 3, result.size() ); final Iterator<ContainerDescriptorHandlerConfig> it = result.iterator(); assertEquals( "one", it.next().getHandlerName() ); assertEquals( "two", it.next().getHandlerName() ); assertEquals( "three", it.next().getHandlerName() ); } public void testMergeComponentsWithMainAssembly_ShouldAddOneFileSetToAssembly() throws IOException, AssemblyReadException { final Component component = new Component(); final FileSet fileSet = new FileSet(); fileSet.setDirectory( "/dir" ); component.addFileSet( fileSet ); final File componentFile = fileManager.createTempFile(); Writer writer = null; try { writer = new OutputStreamWriter( new FileOutputStream( componentFile ), "UTF-8" ); final ComponentXpp3Writer componentWriter = new ComponentXpp3Writer(); componentWriter.write( writer, component ); writer.close(); writer = null; } finally { IOUtil.close( writer ); } final String filename = componentFile.getName(); final Assembly assembly = new Assembly(); assembly.addComponentDescriptor( filename ); final File basedir = componentFile.getParentFile(); final MavenProject project = new MavenProject(); expect( configSource.getProject() ).andReturn( project ).anyTimes(); expect( 
configSource.getBasedir() ).andReturn( basedir ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); InterpolationState is = new InterpolationState(); ComponentXpp3Reader.ContentTransformer componentIp = AssemblyInterpolator.componentInterpolator( FixedStringSearchInterpolator.create(), is, new ConsoleLogger( Logger.LEVEL_DEBUG, "console" ) ); mockManager.replayAll(); new DefaultAssemblyReader().mergeComponentsWithMainAssembly( assembly, null, configSource, componentIp ); final List<FileSet> fileSets = assembly.getFileSets(); assertNotNull( fileSets ); assertEquals( 1, fileSets.size() ); final FileSet fs = fileSets.get( 0 ); assertEquals( "/dir", fs.getDirectory() ); mockManager.verifyAll(); } public void testReadAssembly_ShouldReadAssemblyWithoutComponentsInterpolationOrSiteDirInclusion() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly = new Assembly(); assembly.setId( "test" ); final Assembly result = doReadAssembly( assembly ); assertEquals( assembly.getId(), result.getId() ); mockManager.verifyAll(); } public void testReadAssembly_ShouldReadAssemblyWithSiteDirInclusionFromAssemblyWithoutComponentsOrInterpolation() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly = new Assembly(); assembly.setId( "test" ); assembly.setIncludeSiteDirectory( true ); final StringReader sr = writeToStringReader( assembly ); final File siteDir = fileManager.createTempDir(); expect( configSource.getSiteDirectory() ).andReturn( siteDir ).anyTimes(); final File basedir = fileManager.createTempDir(); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); final Model model = new Model(); model.setGroupId( "group" ); model.setArtifactId( "artifact" ); model.setVersion( "version" ); final MavenProject project = new MavenProject( model ); expect( configSource.getProject() ).andReturn( project ).anyTimes(); 
DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); final Assembly result = new DefaultAssemblyReader().readAssembly( sr, "testLocation", null, configSource ); assertEquals( assembly.getId(), result.getId() ); final List<FileSet> fileSets = result.getFileSets(); assertEquals( 1, fileSets.size() ); assertEquals( "/site", fileSets.get( 0 ).getOutputDirectory() ); mockManager.verifyAll(); } public void testReadAssembly_ShouldReadAssemblyWithComponentWithoutSiteDirInclusionOrInterpolation() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final File componentsFile = fileManager.createTempFile(); final File basedir = componentsFile.getParentFile(); final String componentsFilename = componentsFile.getName(); final Component component = new Component(); final FileSet fs = new FileSet(); fs.setDirectory( "/dir" ); component.addFileSet( fs ); Writer fw = null; try { fw = new OutputStreamWriter( new FileOutputStream( componentsFile ), "UTF-8" ); new ComponentXpp3Writer().write( fw, component ); fw.close(); fw = null; } finally { IOUtil.close( fw ); } final Assembly assembly = new Assembly(); assembly.setId( "test" ); assembly.addComponentDescriptor( componentsFilename ); final StringReader sr = writeToStringReader( assembly ); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); final Model model = new Model(); model.setGroupId( "group" ); model.setArtifactId( "artifact" ); model.setVersion( "version" ); final MavenProject project = new MavenProject( model ); expect( configSource.getProject() ).andReturn( project ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); final Assembly result = new DefaultAssemblyReader().readAssembly( sr, "testLocation", null, configSource ); assertEquals( assembly.getId(), result.getId() ); final List<FileSet> fileSets = result.getFileSets(); assertEquals( 1, fileSets.size() ); assertEquals( "/dir", 
fileSets.get( 0 ).getDirectory() ); mockManager.verifyAll(); } public void testReadAssembly_ShouldReadAssemblyWithComponentInterpolationWithoutSiteDirInclusionOrAssemblyInterpolation() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final File componentsFile = fileManager.createTempFile(); final File basedir = componentsFile.getParentFile(); final String componentsFilename = componentsFile.getName(); final Component component = new Component(); final FileSet fs = new FileSet(); fs.setDirectory( "${groupId}-dir" ); component.addFileSet( fs ); Writer fw = null; try { fw = new OutputStreamWriter( new FileOutputStream( componentsFile ), "UTF-8" ); new ComponentXpp3Writer().write( fw, component ); fw.close(); fw = null; } finally { IOUtil.close( fw ); } final Assembly assembly = new Assembly(); assembly.setId( "test" ); assembly.addComponentDescriptor( componentsFilename ); final StringReader sr = writeToStringReader( assembly ); expect( configSource.getBasedir() ).andReturn( basedir ).atLeastOnce(); final Model model = new Model(); model.setGroupId( "group" ); model.setArtifactId( "artifact" ); model.setVersion( "version" ); final MavenProject project = new MavenProject( model ); expect( configSource.getProject() ).andReturn( project ).atLeastOnce(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); final Assembly result = new DefaultAssemblyReader().readAssembly( sr, "testLocation", null, configSource ); assertEquals( assembly.getId(), result.getId() ); final List<FileSet> fileSets = result.getFileSets(); assertEquals( 1, fileSets.size() ); assertEquals( "group-dir", fileSets.get( 0 ).getDirectory() ); mockManager.verifyAll(); } public void testReadAssembly_ShouldReadAssemblyWithInterpolationWithoutComponentsOrSiteDirInclusion() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly = new Assembly(); assembly.setId( "${groupId}-assembly" ); 
final Assembly result = doReadAssembly( assembly ); assertEquals( "group-assembly", result.getId() ); mockManager.verifyAll(); } private Assembly doReadAssembly( Assembly assembly ) throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final StringReader sr = writeToStringReader( assembly ); final File basedir = fileManager.createTempDir(); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); final Model model = new Model(); model.setGroupId( "group" ); model.setArtifactId( "artifact" ); model.setVersion( "version" ); final MavenProject project = new MavenProject( model ); expect( configSource.getProject() ).andReturn( project ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); return new DefaultAssemblyReader().readAssembly( sr, "testLocation", null, configSource ); } public void testGetAssemblyFromDescriptorFile_ShouldReadAssembly() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly = new Assembly(); assembly.setId( "test" ); final FileSet fs = new FileSet(); fs.setDirectory( "/dir" ); assembly.addFileSet( fs ); final File assemblyFile = fileManager.createTempFile(); final File basedir = assemblyFile.getParentFile(); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); expect( configSource.getProject() ).andReturn( new MavenProject( new Model() ) ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); Writer writer = null; try { writer = new OutputStreamWriter( new FileOutputStream( assemblyFile ), "UTF-8" ); new AssemblyXpp3Writer().write( writer, assembly ); writer.close(); writer = null; } finally { IOUtil.close( writer ); } mockManager.replayAll(); final Assembly result = new DefaultAssemblyReader().getAssemblyFromDescriptorFile( assemblyFile, configSource ); assertEquals( assembly.getId(), result.getId() ); mockManager.verifyAll(); } public void 
testGetAssemblyForDescriptorReference_ShouldReadBinaryAssemblyRef() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final File basedir = fileManager.createTempDir(); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); expect( configSource.getProject() ).andReturn( new MavenProject( new Model() ) ).anyTimes(); expect( configSource.isIgnoreMissingDescriptor() ).andReturn( false ).anyTimes(); DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); final Assembly result = new DefaultAssemblyReader().getAssemblyForDescriptorReference( "bin", configSource ); assertEquals( "bin", result.getId() ); mockManager.verifyAll(); } public void testReadAssemblies_ShouldGetAssemblyDescriptorFromSingleFile() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly = new Assembly(); assembly.setId( "test" ); final FileSet fs = new FileSet(); fs.setDirectory( "/dir" ); assembly.addFileSet( fs ); final File basedir = fileManager.createTempDir(); final List<String> files = writeAssembliesToFile( Collections.singletonList( assembly ), basedir ); final String assemblyFile = files.get( 0 ); final List<Assembly> assemblies = performReadAssemblies( basedir, new String[] { assemblyFile }, null, null ); assertNotNull( assemblies ); assertEquals( 1, assemblies.size() ); final Assembly result = assemblies.get( 0 ); assertEquals( assembly.getId(), result.getId() ); } public void testReadAssemblies_ShouldFailWhenSingleDescriptorFileMissing() throws IOException, InvalidAssemblerConfigurationException { final File basedir = fileManager.createTempDir(); final File assemblyFile = new File( basedir, "test.xml" ); assemblyFile.delete(); try { performReadAssemblies( basedir, null, null, null, false ); fail( "Should fail when descriptor file is missing and ignoreDescriptors == false" ); } catch ( final AssemblyReadException e ) { // expected. 
} } public void testReadAssemblies_ShouldIgnoreMissingSingleDescriptorFileWhenIgnoreIsConfigured() throws IOException, InvalidAssemblerConfigurationException { final File basedir = fileManager.createTempDir(); final File assemblyFile = new File( basedir, "test.xml" ); assemblyFile.delete(); try { performReadAssemblies( basedir, null, null, null, true ); } catch ( final AssemblyReadException e ) { fail( "Setting ignoreMissingDescriptor == true (true flag in performReadAssemblies, above) should NOT " + "produce an exception." ); } } public void testReadAssemblies_ShouldGetAssemblyDescriptorFromFileArray() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly1 = new Assembly(); assembly1.setId( "test" ); final Assembly assembly2 = new Assembly(); assembly2.setId( "test2" ); final List<Assembly> assemblies = new ArrayList<Assembly>(); assemblies.add( assembly1 ); assemblies.add( assembly2 ); final File basedir = fileManager.createTempDir(); final List<String> files = writeAssembliesToFile( assemblies, basedir ); final List<Assembly> results = performReadAssemblies( basedir, files.toArray( new String[files.size()] ), null, null ); assertNotNull( results ); assertEquals( 2, results.size() ); final Assembly result1 = assemblies.get( 0 ); assertEquals( assembly1.getId(), result1.getId() ); final Assembly result2 = assemblies.get( 1 ); assertEquals( assembly2.getId(), result2.getId() ); } public void testReadAssemblies_ShouldGetAssemblyDescriptorFromMultipleRefs() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final File basedir = fileManager.createTempDir(); final List<Assembly> assemblies = performReadAssemblies( basedir, null, new String[]{ "bin", "src" }, null ); assertNotNull( assemblies ); assertEquals( 2, assemblies.size() ); final Assembly result = assemblies.get( 0 ); assertEquals( "bin", result.getId() ); final Assembly result2 = assemblies.get( 1 ); assertEquals( "src", 
result2.getId() ); } public void testReadAssemblies_ShouldGetAssemblyDescriptorFromDirectory() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly1 = new Assembly(); assembly1.setId( "test" ); final Assembly assembly2 = new Assembly(); assembly2.setId( "test2" ); final List<Assembly> assemblies = new ArrayList<Assembly>(); assemblies.add( assembly1 ); assemblies.add( assembly2 ); final File basedir = fileManager.createTempDir(); writeAssembliesToFile( assemblies, basedir ); final List<Assembly> results = performReadAssemblies( basedir, null, null, basedir ); assertNotNull( results ); assertEquals( 2, results.size() ); final Assembly result1 = assemblies.get( 0 ); assertEquals( assembly1.getId(), result1.getId() ); final Assembly result2 = assemblies.get( 1 ); assertEquals( assembly2.getId(), result2.getId() ); } public void testReadAssemblies_ShouldGetTwoAssemblyDescriptorsFromDirectoryWithThreeFiles() throws IOException, AssemblyReadException, InvalidAssemblerConfigurationException { final Assembly assembly1 = new Assembly(); assembly1.setId( "test" ); final Assembly assembly2 = new Assembly(); assembly2.setId( "test2" ); final List<Assembly> assemblies = new ArrayList<Assembly>(); assemblies.add( assembly1 ); assemblies.add( assembly2 ); final File basedir = fileManager.createTempDir(); writeAssembliesToFile( assemblies, basedir ); fileManager.createFile( basedir, "readme.txt", "This is just a readme file, not a descriptor." 
); final List<Assembly> results = performReadAssemblies( basedir, null, null, basedir ); assertNotNull( results ); assertEquals( 2, results.size() ); final Assembly result1 = assemblies.get( 0 ); assertEquals( assembly1.getId(), result1.getId() ); final Assembly result2 = assemblies.get( 1 ); assertEquals( assembly2.getId(), result2.getId() ); } private List<String> writeAssembliesToFile( final List<Assembly> assemblies, final File dir ) throws IOException { final List<String> files = new ArrayList<String>(); for ( final Assembly assembly : assemblies ) { final File assemblyFile = new File( dir, assembly.getId() + ".xml" ); Writer writer = null; try { writer = new OutputStreamWriter( new FileOutputStream( assemblyFile ), "UTF-8" ); new AssemblyXpp3Writer().write( writer, assembly ); writer.close(); writer = null; } finally { IOUtil.close( writer ); } files.add( assemblyFile.getAbsolutePath() ); } return files; } private List<Assembly> performReadAssemblies( final File basedir, final String[] descriptors, final String[] descriptorRefs, final File descriptorDir ) throws AssemblyReadException, InvalidAssemblerConfigurationException { return performReadAssemblies( basedir, descriptors, descriptorRefs, descriptorDir, false ); } private List<Assembly> performReadAssemblies( final File basedir, final String[] descriptors, final String[] descriptorRefs, final File descriptorDir, final boolean ignoreMissing ) throws AssemblyReadException, InvalidAssemblerConfigurationException { expect( configSource.getDescriptorReferences() ).andReturn( descriptorRefs ); expect( configSource.getDescriptors() ).andReturn( descriptors ); expect( configSource.getDescriptorSourceDirectory() ).andReturn( descriptorDir ); expect( configSource.getBasedir() ).andReturn( basedir ).anyTimes(); expect( configSource.getProject() ).andReturn( new MavenProject( new Model() ) ).anyTimes(); expect( configSource.isIgnoreMissingDescriptor() ).andReturn( ignoreMissing ).anyTimes(); 
DefaultAssemblyArchiverTest.setupInterpolators( configSource ); mockManager.replayAll(); final List<Assembly> assemblies = new DefaultAssemblyReader().readAssemblies( configSource ); mockManager.verifyAll(); return assemblies; } }
googleads/google-ads-java
35,199
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/AssetGroupProductGroupView.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/asset_group_product_group_view.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; /** * <pre> * An asset group product group view. * </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.AssetGroupProductGroupView} */ public final class AssetGroupProductGroupView extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.resources.AssetGroupProductGroupView) AssetGroupProductGroupViewOrBuilder { private static final long serialVersionUID = 0L; // Use AssetGroupProductGroupView.newBuilder() to construct. private AssetGroupProductGroupView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AssetGroupProductGroupView() { resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AssetGroupProductGroupView(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v19_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v19_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ 
= ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. 
*/ @java.lang.Override public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ @java.lang.Override public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. 
* </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, assetGroup_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, assetGroupListingGroupFilter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, assetGroup_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, assetGroupListingGroupFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.resources.AssetGroupProductGroupView)) { return super.equals(obj); } com.google.ads.googleads.v19.resources.AssetGroupProductGroupView other = (com.google.ads.googleads.v19.resources.AssetGroupProductGroupView) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (!getAssetGroup() .equals(other.getAssetGroup())) return false; if (!getAssetGroupListingGroupFilter() .equals(other.getAssetGroupListingGroupFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + ASSET_GROUP_FIELD_NUMBER; hash = (53 * hash) + getAssetGroup().hashCode(); hash = (37 * hash) + ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER; hash = (53 * hash) + getAssetGroupListingGroupFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.resources.AssetGroupProductGroupView prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * An asset group product group view. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.resources.AssetGroupProductGroupView} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.resources.AssetGroupProductGroupView) com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v19_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v19_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.Builder.class); } // Construct using com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v19_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.getDefaultInstance(); 
} @java.lang.Override public com.google.ads.googleads.v19.resources.AssetGroupProductGroupView build() { com.google.ads.googleads.v19.resources.AssetGroupProductGroupView result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.resources.AssetGroupProductGroupView buildPartial() { com.google.ads.googleads.v19.resources.AssetGroupProductGroupView result = new com.google.ads.googleads.v19.resources.AssetGroupProductGroupView(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.resources.AssetGroupProductGroupView result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.assetGroup_ = assetGroup_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.assetGroupListingGroupFilter_ = assetGroupListingGroupFilter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.resources.AssetGroupProductGroupView) { return mergeFrom((com.google.ads.googleads.v19.resources.AssetGroupProductGroupView)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.resources.AssetGroupProductGroupView other) { if (other == com.google.ads.googleads.v19.resources.AssetGroupProductGroupView.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getAssetGroup().isEmpty()) { assetGroup_ = other.assetGroup_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getAssetGroupListingGroupFilter().isEmpty()) { assetGroupListingGroupFilter_ = other.assetGroupListingGroupFilter_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { assetGroup_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { assetGroupListingGroupFilter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. 
* Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. 
The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. */ public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroup( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAssetGroup() { assetGroup_ = getDefaultInstance().getAssetGroup(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroupBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. 
*/ public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroupListingGroupFilter to set. * @return This builder for chaining. */ public Builder setAssetGroupListingGroupFilter( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearAssetGroupListingGroupFilter() { assetGroupListingGroupFilter_ = getDefaultInstance().getAssetGroupListingGroupFilter(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroupListingGroupFilter to set. * @return This builder for chaining. 
*/ public Builder setAssetGroupListingGroupFilterBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.resources.AssetGroupProductGroupView) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.resources.AssetGroupProductGroupView) private static final com.google.ads.googleads.v19.resources.AssetGroupProductGroupView DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.resources.AssetGroupProductGroupView(); } public static com.google.ads.googleads.v19.resources.AssetGroupProductGroupView getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AssetGroupProductGroupView> PARSER = new com.google.protobuf.AbstractParser<AssetGroupProductGroupView>() { @java.lang.Override public AssetGroupProductGroupView parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AssetGroupProductGroupView> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AssetGroupProductGroupView> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,199
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/AssetGroupProductGroupView.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/asset_group_product_group_view.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; /** * <pre> * An asset group product group view. * </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.AssetGroupProductGroupView} */ public final class AssetGroupProductGroupView extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.resources.AssetGroupProductGroupView) AssetGroupProductGroupViewOrBuilder { private static final long serialVersionUID = 0L; // Use AssetGroupProductGroupView.newBuilder() to construct. private AssetGroupProductGroupView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AssetGroupProductGroupView() { resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AssetGroupProductGroupView(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v20_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v20_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ 
= ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. 
*/ @java.lang.Override public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ @java.lang.Override public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. 
* </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, assetGroup_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, assetGroupListingGroupFilter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, assetGroup_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, assetGroupListingGroupFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.resources.AssetGroupProductGroupView)) { return super.equals(obj); } com.google.ads.googleads.v20.resources.AssetGroupProductGroupView other = (com.google.ads.googleads.v20.resources.AssetGroupProductGroupView) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (!getAssetGroup() .equals(other.getAssetGroup())) return false; if (!getAssetGroupListingGroupFilter() .equals(other.getAssetGroupListingGroupFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + ASSET_GROUP_FIELD_NUMBER; hash = (53 * hash) + getAssetGroup().hashCode(); hash = (37 * hash) + ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER; hash = (53 * hash) + getAssetGroupListingGroupFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.resources.AssetGroupProductGroupView prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * An asset group product group view. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.resources.AssetGroupProductGroupView} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.resources.AssetGroupProductGroupView) com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v20_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v20_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.Builder.class); } // Construct using com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v20_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.getDefaultInstance(); 
} @java.lang.Override public com.google.ads.googleads.v20.resources.AssetGroupProductGroupView build() { com.google.ads.googleads.v20.resources.AssetGroupProductGroupView result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.resources.AssetGroupProductGroupView buildPartial() { com.google.ads.googleads.v20.resources.AssetGroupProductGroupView result = new com.google.ads.googleads.v20.resources.AssetGroupProductGroupView(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.resources.AssetGroupProductGroupView result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.assetGroup_ = assetGroup_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.assetGroupListingGroupFilter_ = assetGroupListingGroupFilter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.resources.AssetGroupProductGroupView) { return mergeFrom((com.google.ads.googleads.v20.resources.AssetGroupProductGroupView)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.resources.AssetGroupProductGroupView other) { if (other == com.google.ads.googleads.v20.resources.AssetGroupProductGroupView.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getAssetGroup().isEmpty()) { assetGroup_ = other.assetGroup_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getAssetGroupListingGroupFilter().isEmpty()) { assetGroupListingGroupFilter_ = other.assetGroupListingGroupFilter_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { assetGroup_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { assetGroupListingGroupFilter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. 
* Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. 
The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. */ public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroup( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAssetGroup() { assetGroup_ = getDefaultInstance().getAssetGroup(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroupBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. 
*/ public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroupListingGroupFilter to set. * @return This builder for chaining. */ public Builder setAssetGroupListingGroupFilter( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearAssetGroupListingGroupFilter() { assetGroupListingGroupFilter_ = getDefaultInstance().getAssetGroupListingGroupFilter(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroupListingGroupFilter to set. * @return This builder for chaining. 
*/ public Builder setAssetGroupListingGroupFilterBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.resources.AssetGroupProductGroupView) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.resources.AssetGroupProductGroupView) private static final com.google.ads.googleads.v20.resources.AssetGroupProductGroupView DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.resources.AssetGroupProductGroupView(); } public static com.google.ads.googleads.v20.resources.AssetGroupProductGroupView getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AssetGroupProductGroupView> PARSER = new com.google.protobuf.AbstractParser<AssetGroupProductGroupView>() { @java.lang.Override public AssetGroupProductGroupView parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AssetGroupProductGroupView> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AssetGroupProductGroupView> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,199
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/AssetGroupProductGroupView.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/resources/asset_group_product_group_view.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.resources; /** * <pre> * An asset group product group view. * </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.AssetGroupProductGroupView} */ public final class AssetGroupProductGroupView extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.AssetGroupProductGroupView) AssetGroupProductGroupViewOrBuilder { private static final long serialVersionUID = 0L; // Use AssetGroupProductGroupView.newBuilder() to construct. private AssetGroupProductGroupView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AssetGroupProductGroupView() { resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AssetGroupProductGroupView(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v21_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v21_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ 
= ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. 
*/ @java.lang.Override public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ @java.lang.Override public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. 
* </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. */ @java.lang.Override public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, assetGroup_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, assetGroupListingGroupFilter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroup_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, assetGroup_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assetGroupListingGroupFilter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, assetGroupListingGroupFilter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.resources.AssetGroupProductGroupView)) { return super.equals(obj); } com.google.ads.googleads.v21.resources.AssetGroupProductGroupView other = (com.google.ads.googleads.v21.resources.AssetGroupProductGroupView) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (!getAssetGroup() .equals(other.getAssetGroup())) return false; if (!getAssetGroupListingGroupFilter() .equals(other.getAssetGroupListingGroupFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + ASSET_GROUP_FIELD_NUMBER; hash = (53 * hash) + getAssetGroup().hashCode(); hash = (37 * hash) + ASSET_GROUP_LISTING_GROUP_FILTER_FIELD_NUMBER; hash = (53 * hash) + getAssetGroupListingGroupFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.resources.AssetGroupProductGroupView prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * An asset group product group view. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.resources.AssetGroupProductGroupView} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.AssetGroupProductGroupView) com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v21_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v21_resources_AssetGroupProductGroupView_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.class, com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.Builder.class); } // Construct using com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; resourceName_ = ""; assetGroup_ = ""; assetGroupListingGroupFilter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupViewProto.internal_static_google_ads_googleads_v21_resources_AssetGroupProductGroupView_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.getDefaultInstance(); 
} @java.lang.Override public com.google.ads.googleads.v21.resources.AssetGroupProductGroupView build() { com.google.ads.googleads.v21.resources.AssetGroupProductGroupView result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.resources.AssetGroupProductGroupView buildPartial() { com.google.ads.googleads.v21.resources.AssetGroupProductGroupView result = new com.google.ads.googleads.v21.resources.AssetGroupProductGroupView(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.resources.AssetGroupProductGroupView result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.resourceName_ = resourceName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.assetGroup_ = assetGroup_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.assetGroupListingGroupFilter_ = assetGroupListingGroupFilter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.resources.AssetGroupProductGroupView) { return mergeFrom((com.google.ads.googleads.v21.resources.AssetGroupProductGroupView)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.resources.AssetGroupProductGroupView other) { if (other == com.google.ads.googleads.v21.resources.AssetGroupProductGroupView.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getAssetGroup().isEmpty()) { assetGroup_ = other.assetGroup_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getAssetGroupListingGroupFilter().isEmpty()) { assetGroupListingGroupFilter_ = other.assetGroupListingGroupFilter_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { assetGroup_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { assetGroupListingGroupFilter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group product group view. 
* Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group product group view. * Asset group product group view resource names have the form: * * `customers/{customer_id}/assetGroupProductGroupViews/{asset_group_id}~{listing_group_filter_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object assetGroup_ = ""; /** * <pre> * Output only. 
The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroup. */ public java.lang.String getAssetGroup() { java.lang.Object ref = assetGroup_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroup_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroup. */ public com.google.protobuf.ByteString getAssetGroupBytes() { java.lang.Object ref = assetGroup_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroup_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroup( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. 
*/ public Builder clearAssetGroup() { assetGroup_ = getDefaultInstance().getAssetGroup(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Output only. The asset group associated with the listing group filter. * </pre> * * <code>string asset_group = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroup to set. * @return This builder for chaining. */ public Builder setAssetGroupBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroup_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object assetGroupListingGroupFilter_ = ""; /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The assetGroupListingGroupFilter. */ public java.lang.String getAssetGroupListingGroupFilter() { java.lang.Object ref = assetGroupListingGroupFilter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); assetGroupListingGroupFilter_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return The bytes for assetGroupListingGroupFilter. 
*/ public com.google.protobuf.ByteString getAssetGroupListingGroupFilterBytes() { java.lang.Object ref = assetGroupListingGroupFilter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); assetGroupListingGroupFilter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The assetGroupListingGroupFilter to set. * @return This builder for chaining. */ public Builder setAssetGroupListingGroupFilter( java.lang.String value) { if (value == null) { throw new NullPointerException(); } assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearAssetGroupListingGroupFilter() { assetGroupListingGroupFilter_ = getDefaultInstance().getAssetGroupListingGroupFilter(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * <pre> * Output only. The resource name of the asset group listing group filter. * </pre> * * <code>string asset_group_listing_group_filter = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for assetGroupListingGroupFilter to set. * @return This builder for chaining. 
*/ public Builder setAssetGroupListingGroupFilterBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); assetGroupListingGroupFilter_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.AssetGroupProductGroupView) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.AssetGroupProductGroupView) private static final com.google.ads.googleads.v21.resources.AssetGroupProductGroupView DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.AssetGroupProductGroupView(); } public static com.google.ads.googleads.v21.resources.AssetGroupProductGroupView getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AssetGroupProductGroupView> PARSER = new com.google.protobuf.AbstractParser<AssetGroupProductGroupView>() { @java.lang.Override public AssetGroupProductGroupView parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AssetGroupProductGroupView> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AssetGroupProductGroupView> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.resources.AssetGroupProductGroupView getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,025
java-retail/google-cloud-retail/src/test/java/com/google/cloud/retail/v2alpha/CatalogServiceClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.retail.v2alpha; import static com.google.cloud.retail.v2alpha.CatalogServiceClient.ListCatalogsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @Generated("by gapic-generator-java") public class CatalogServiceClientTest { private static MockCatalogService mockCatalogService; private static MockLocations mockLocations; private static MockServiceHelper mockServiceHelper; private LocalChannelProvider channelProvider; private CatalogServiceClient client; 
@BeforeClass public static void startStaticServer() { mockCatalogService = new MockCatalogService(); mockLocations = new MockLocations(); mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockCatalogService, mockLocations)); mockServiceHelper.start(); } @AfterClass public static void stopServer() { mockServiceHelper.stop(); } @Before public void setUp() throws IOException { mockServiceHelper.reset(); channelProvider = mockServiceHelper.createChannelProvider(); CatalogServiceSettings settings = CatalogServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) .setCredentialsProvider(NoCredentialsProvider.create()) .build(); client = CatalogServiceClient.create(settings); } @After public void tearDown() throws Exception { client.close(); } @Test public void listCatalogsTest() throws Exception { Catalog responsesElement = Catalog.newBuilder().build(); ListCatalogsResponse expectedResponse = ListCatalogsResponse.newBuilder() .setNextPageToken("") .addAllCatalogs(Arrays.asList(responsesElement)) .build(); mockCatalogService.addResponse(expectedResponse); LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCatalogsExceptionTest() throws Exception { StatusRuntimeException 
exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); client.listCatalogs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void listCatalogsTest2() throws Exception { Catalog responsesElement = Catalog.newBuilder().build(); ListCatalogsResponse expectedResponse = ListCatalogsResponse.newBuilder() .setNextPageToken("") .addAllCatalogs(Arrays.asList(responsesElement)) .build(); mockCatalogService.addResponse(expectedResponse); String parent = "parent-995424086"; ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent); List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll()); Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0)); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListCatalogsRequest actualRequest = ((ListCatalogsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void listCatalogsExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String parent = "parent-995424086"; client.listCatalogs(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateCatalogTest() throws Exception { Catalog expectedResponse = Catalog.newBuilder() .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setDisplayName("displayName1714148973") .setProductLevelConfig(ProductLevelConfig.newBuilder().build()) .setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build()) .build(); mockCatalogService.addResponse(expectedResponse); Catalog catalog = Catalog.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); Catalog actualResponse = client.updateCatalog(catalog, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateCatalogRequest actualRequest = ((UpdateCatalogRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateCatalogExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { Catalog catalog = Catalog.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateCatalog(catalog, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setDefaultBranchTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockCatalogService.addResponse(expectedResponse); CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.setDefaultBranch(catalog); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetDefaultBranchRequest actualRequest = ((SetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog.toString(), actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setDefaultBranchExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.setDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void setDefaultBranchTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockCatalogService.addResponse(expectedResponse); String catalog = "catalog555704345"; client.setDefaultBranch(catalog); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); SetDefaultBranchRequest actualRequest = ((SetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void setDefaultBranchExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String catalog = "catalog555704345"; client.setDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDefaultBranchTest() throws Exception { GetDefaultBranchResponse expectedResponse = GetDefaultBranchResponse.newBuilder() .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString()) .setSetTime(Timestamp.newBuilder().build()) .setNote("note3387378") .build(); mockCatalogService.addResponse(expectedResponse); CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetDefaultBranchRequest actualRequest = ((GetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog.toString(), actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getDefaultBranchExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getDefaultBranchTest2() throws Exception { GetDefaultBranchResponse expectedResponse = GetDefaultBranchResponse.newBuilder() .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString()) .setSetTime(Timestamp.newBuilder().build()) .setNote("note3387378") .build(); mockCatalogService.addResponse(expectedResponse); String catalog = "catalog555704345"; GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetDefaultBranchRequest actualRequest = ((GetDefaultBranchRequest) actualRequests.get(0)); Assert.assertEquals(catalog, actualRequest.getCatalog()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getDefaultBranchExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String catalog = "catalog555704345"; client.getDefaultBranch(catalog); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getCompletionConfigTest() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); CompletionConfig actualResponse = client.getCompletionConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCompletionConfigRequest actualRequest = ((GetCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCompletionConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getCompletionConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getCompletionConfigTest2() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); String name = "name3373707"; CompletionConfig actualResponse = client.getCompletionConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetCompletionConfigRequest actualRequest = ((GetCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getCompletionConfigExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String name = "name3373707"; client.getCompletionConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateCompletionConfigTest() throws Exception { CompletionConfig expectedResponse = CompletionConfig.newBuilder() .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setMatchingOrder("matchingOrder-1366761135") .setMaxSuggestions(618824852) .setMinPrefixLength(96853510) .setAutoLearning(true) .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751") .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastDenylistImportOperation("lastDenylistImportOperation1262341570") .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build()) .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689") .build(); mockCatalogService.addResponse(expectedResponse); CompletionConfig completionConfig = CompletionConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); CompletionConfig actualResponse = client.updateCompletionConfig(completionConfig, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateCompletionConfigRequest actualRequest = ((UpdateCompletionConfigRequest) actualRequests.get(0)); Assert.assertEquals(completionConfig, actualRequest.getCompletionConfig()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateCompletionConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { CompletionConfig completionConfig = CompletionConfig.newBuilder().build(); FieldMask updateMask = 
FieldMask.newBuilder().build(); client.updateCompletionConfig(completionConfig, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } @Test public void getAttributesConfigTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); AttributesConfig actualResponse = client.getAttributesConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAttributesConfigRequest actualRequest = ((GetAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAttributesConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]"); client.getAttributesConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void getAttributesConfigTest2() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); String name = "name3373707"; AttributesConfig actualResponse = client.getAttributesConfig(name); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); GetAttributesConfigRequest actualRequest = ((GetAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(name, actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void getAttributesConfigExceptionTest2() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { String name = "name3373707"; client.getAttributesConfig(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void updateAttributesConfigTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AttributesConfig attributesConfig = AttributesConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); AttributesConfig actualResponse = client.updateAttributesConfig(attributesConfig, updateMask); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateAttributesConfigRequest actualRequest = ((UpdateAttributesConfigRequest) actualRequests.get(0)); Assert.assertEquals(attributesConfig, actualRequest.getAttributesConfig()); Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void updateAttributesConfigExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AttributesConfig attributesConfig = AttributesConfig.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); client.updateAttributesConfig(attributesConfig, updateMask); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void addCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); AddCatalogAttributeRequest request = AddCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .build(); AttributesConfig actualResponse = client.addCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); AddCatalogAttributeRequest actualRequest = ((AddCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getCatalogAttribute(), actualRequest.getCatalogAttribute()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void addCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { AddCatalogAttributeRequest request = AddCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .build(); client.addCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void removeCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); RemoveCatalogAttributeRequest request = RemoveCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setKey("key106079") .build(); AttributesConfig actualResponse = client.removeCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); RemoveCatalogAttributeRequest actualRequest = ((RemoveCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getKey(), actualRequest.getKey()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void removeCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { RemoveCatalogAttributeRequest request = RemoveCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setKey("key106079") .build(); client.removeCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void batchRemoveCatalogAttributesTest() throws Exception { BatchRemoveCatalogAttributesResponse expectedResponse = BatchRemoveCatalogAttributesResponse.newBuilder() .addAllDeletedCatalogAttributes(new ArrayList<String>()) .addAllResetCatalogAttributes(new ArrayList<String>()) .build(); mockCatalogService.addResponse(expectedResponse); BatchRemoveCatalogAttributesRequest request = BatchRemoveCatalogAttributesRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .addAllAttributeKeys(new ArrayList<String>()) .build(); BatchRemoveCatalogAttributesResponse actualResponse = client.batchRemoveCatalogAttributes(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); BatchRemoveCatalogAttributesRequest actualRequest = ((BatchRemoveCatalogAttributesRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getAttributeKeysList(), actualRequest.getAttributeKeysList()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void batchRemoveCatalogAttributesExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { BatchRemoveCatalogAttributesRequest request = BatchRemoveCatalogAttributesRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .addAllAttributeKeys(new ArrayList<String>()) .build(); client.batchRemoveCatalogAttributes(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. 
} } @Test public void replaceCatalogAttributeTest() throws Exception { AttributesConfig expectedResponse = AttributesConfig.newBuilder() .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>()) .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0)) .build(); mockCatalogService.addResponse(expectedResponse); ReplaceCatalogAttributeRequest request = ReplaceCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) .build(); AttributesConfig actualResponse = client.replaceCatalogAttribute(request); Assert.assertEquals(expectedResponse, actualResponse); List<AbstractMessage> actualRequests = mockCatalogService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ReplaceCatalogAttributeRequest actualRequest = ((ReplaceCatalogAttributeRequest) actualRequests.get(0)); Assert.assertEquals(request.getAttributesConfig(), actualRequest.getAttributesConfig()); Assert.assertEquals(request.getCatalogAttribute(), actualRequest.getCatalogAttribute()); Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern())); } @Test public void replaceCatalogAttributeExceptionTest() throws Exception { StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockCatalogService.addException(exception); try { ReplaceCatalogAttributeRequest request = ReplaceCatalogAttributeRequest.newBuilder() .setAttributesConfig( AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString()) .setCatalogAttribute(CatalogAttribute.newBuilder().build()) .setUpdateMask(FieldMask.newBuilder().build()) 
.build(); client.replaceCatalogAttribute(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
apache/ofbiz
35,018
framework/service/src/main/java/org/apache/ofbiz/service/ModelServiceReader.java
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.service; import java.io.IOException; import java.io.Serializable; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.xml.parsers.ParserConfigurationException; import org.apache.ofbiz.base.config.GenericConfigException; import org.apache.ofbiz.base.config.ResourceHandler; import org.apache.ofbiz.base.metrics.MetricsFactory; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.GeneralException; import org.apache.ofbiz.base.util.UtilTimer; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.base.util.UtilXml; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.model.ModelEntity; import org.apache.ofbiz.entity.model.ModelField; import org.apache.ofbiz.entity.model.ModelFieldType; import org.apache.ofbiz.service.ModelParam.ModelParamValidator; import 
org.apache.ofbiz.service.group.GroupModel; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * Generic Service - Service Definition Reader */ @SuppressWarnings("serial") public class ModelServiceReader implements Serializable { public static final String module = ModelServiceReader.class.getName(); /** is either from a URL or from a ResourceLoader (through the ResourceHandler) */ protected boolean isFromURL; protected URL readerURL = null; protected ResourceHandler handler = null; protected Delegator delegator = null; public static Map<String, ModelService> getModelServiceMap(URL readerURL, Delegator delegator) { if (readerURL == null) { Debug.logError("Cannot add reader with a null reader URL", module); return null; } ModelServiceReader reader = new ModelServiceReader(true, readerURL, null, delegator); return reader.getModelServices(); } public static Map<String, ModelService> getModelServiceMap(ResourceHandler handler, Delegator delegator) { ModelServiceReader reader = new ModelServiceReader(false, null, handler, delegator); return reader.getModelServices(); } private ModelServiceReader(boolean isFromURL, URL readerURL, ResourceHandler handler, Delegator delegator) { this.isFromURL = isFromURL; this.readerURL = readerURL; this.handler = handler; this.delegator = delegator; } private Map<String, ModelService> getModelServices() { UtilTimer utilTimer = new UtilTimer(); Document document; if (this.isFromURL) { // utilTimer.timerString("Before getDocument in file " + readerURL); document = getDocument(readerURL); if (document == null) { return null; } } else { // utilTimer.timerString("Before getDocument in " + handler); try { document = handler.getDocument(); } catch (GenericConfigException e) { Debug.logError(e, "Error getting XML document from resource", module); return null; } } Map<String, ModelService> modelServices = new HashMap<String, ModelService>(); if 
(this.isFromURL) {// utilTimer.timerString("Before getDocumentElement in file " + readerURL); } else {// utilTimer.timerString("Before getDocumentElement in " + handler); } Element docElement = document.getDocumentElement(); if (docElement == null) { return null; } docElement.normalize(); String resourceLocation = handler.getLocation(); try { resourceLocation = handler.getURL().toExternalForm(); } catch (GenericConfigException e) { Debug.logError(e, "Could not get resource URL", module); } int i = 0; Node curChild = docElement.getFirstChild(); if (curChild != null) { if (this.isFromURL) { utilTimer.timerString("Before start of service loop in file " + readerURL); } else { utilTimer.timerString("Before start of service loop in " + handler); } do { if (curChild.getNodeType() == Node.ELEMENT_NODE && "service".equals(curChild.getNodeName())) { i++; Element curServiceElement = (Element) curChild; String serviceName = UtilXml.checkEmpty(curServiceElement.getAttribute("name")); // check to see if service with same name has already been read if (modelServices.containsKey(serviceName)) { Debug.logWarning("Service " + serviceName + " is defined more than once, " + "most recent will over-write previous definition(s)", module); } // utilTimer.timerString(" After serviceName -- " + i + " --"); ModelService service = createModelService(curServiceElement, resourceLocation); // utilTimer.timerString(" After createModelService -- " + i + " --"); if (service != null) { modelServices.put(serviceName, service); // utilTimer.timerString(" After modelServices.put -- " + i + " --"); /* int reqIn = service.getParameterNames(ModelService.IN_PARAM, false).size(); int optIn = service.getParameterNames(ModelService.IN_PARAM, true).size() - reqIn; int reqOut = service.getParameterNames(ModelService.OUT_PARAM, false).size(); int optOut = service.getParameterNames(ModelService.OUT_PARAM, true).size() - reqOut; if (Debug.verboseOn()) { String msg = "-- getModelService: # " + i + " Loaded service: 
" + serviceName + " (IN) " + reqIn + "/" + optIn + " (OUT) " + reqOut + "/" + optOut; Debug.logVerbose(msg, module); } */ } else { Debug.logWarning( "-- -- SERVICE ERROR:getModelService: Could not create service for serviceName: " + serviceName, module); } } } while ((curChild = curChild.getNextSibling()) != null); } else { Debug.logWarning("No child nodes found.", module); } if (this.isFromURL) { utilTimer.timerString("Finished file " + readerURL + " - Total Services: " + i + " FINISHED"); Debug.logInfo("Loaded [" + i + "] Services from " + readerURL, module); } else { utilTimer.timerString("Finished document in " + handler + " - Total Services: " + i + " FINISHED"); if (Debug.infoOn()) { Debug.logInfo("Loaded [" + i + "] Services from " + resourceLocation, module); } } return modelServices; } private ModelService createModelService(Element serviceElement, String resourceLocation) { ModelService service = new ModelService(); service.name = UtilXml.checkEmpty(serviceElement.getAttribute("name")).intern(); service.definitionLocation = resourceLocation; service.engineName = UtilXml.checkEmpty(serviceElement.getAttribute("engine")).intern(); service.location = UtilXml.checkEmpty(serviceElement.getAttribute("location")).intern(); service.invoke = UtilXml.checkEmpty(serviceElement.getAttribute("invoke")).intern(); service.semaphore = UtilXml.checkEmpty(serviceElement.getAttribute("semaphore")).intern(); service.defaultEntityName = UtilXml.checkEmpty(serviceElement.getAttribute("default-entity-name")).intern(); service.fromLoader = isFromURL ? 
readerURL.toExternalForm() : handler.getLoaderName(); // these default to true; if anything but true, make false service.auth = "true".equalsIgnoreCase(serviceElement.getAttribute("auth")); service.export = "true".equalsIgnoreCase(serviceElement.getAttribute("export")); service.debug = "true".equalsIgnoreCase(serviceElement.getAttribute("debug")); // these defaults to false; if anything but false, make it true service.validate = !"false".equalsIgnoreCase(serviceElement.getAttribute("validate")); service.useTransaction = !"false".equalsIgnoreCase(serviceElement.getAttribute("use-transaction")); service.requireNewTransaction = !"false".equalsIgnoreCase(serviceElement.getAttribute("require-new-transaction")); if (service.requireNewTransaction && !service.useTransaction) { // requireNewTransaction implies that a transaction is used service.useTransaction = true; Debug.logWarning("In service definition [" + service.name + "] the value use-transaction has been changed from false to true as required when require-new-transaction is set to true", module); } service.hideResultInLog = !"false".equalsIgnoreCase(serviceElement.getAttribute("hideResultInLog")); // set the semaphore sleep/wait times String semaphoreWaitStr = UtilXml.checkEmpty(serviceElement.getAttribute("semaphore-wait-seconds")); int semaphoreWait = 300; if (UtilValidate.isNotEmpty(semaphoreWaitStr)) { try { semaphoreWait = Integer.parseInt(semaphoreWaitStr); } catch (NumberFormatException e) { Debug.logWarning(e, "Setting semaphore-wait to 5 minutes (default)", module); semaphoreWait = 300; } } service.semaphoreWait = semaphoreWait; String semaphoreSleepStr = UtilXml.checkEmpty(serviceElement.getAttribute("semaphore-sleep")); int semaphoreSleep = 500; if (UtilValidate.isNotEmpty(semaphoreSleepStr)) { try { semaphoreSleep = Integer.parseInt(semaphoreSleepStr); } catch (NumberFormatException e) { Debug.logWarning(e, "Setting semaphore-sleep to 1/2 second (default)", module); semaphoreSleep = 500; } } 
service.semaphoreSleep = semaphoreSleep; // set the max retry field String maxRetryStr = UtilXml.checkEmpty(serviceElement.getAttribute("max-retry")); int maxRetry = -1; if (UtilValidate.isNotEmpty(maxRetryStr)) { try { maxRetry = Integer.parseInt(maxRetryStr); } catch (NumberFormatException e) { Debug.logWarning(e, "Setting maxRetry to -1 (default)", module); maxRetry = -1; } } service.maxRetry = maxRetry; // get the timeout and convert to int String timeoutStr = UtilXml.checkEmpty(serviceElement.getAttribute("transaction-timeout"), serviceElement.getAttribute("transaction-timout")); int timeout = 0; if (UtilValidate.isNotEmpty(timeoutStr)) { try { timeout = Integer.parseInt(timeoutStr); } catch (NumberFormatException e) { Debug.logWarning(e, "Setting timeout to 0 (default)", module); timeout = 0; } } service.transactionTimeout = timeout; service.description = getCDATADef(serviceElement, "description"); service.nameSpace = getCDATADef(serviceElement, "namespace"); // construct the context service.contextInfo = new HashMap<String, ModelParam>(); this.createNotification(serviceElement, service); this.createPermission(serviceElement, service); this.createPermGroups(serviceElement, service); this.createGroupDefs(serviceElement, service); this.createImplDefs(serviceElement, service); this.createAutoAttrDefs(serviceElement, service); this.createAttrDefs(serviceElement, service); this.createOverrideDefs(serviceElement, service); // Get metrics. 
Element metricsElement = UtilXml.firstChildElement(serviceElement, "metric"); if (metricsElement != null) { service.metrics = MetricsFactory.getInstance(metricsElement); } return service; } private String getCDATADef(Element baseElement, String tagName) { String value = ""; NodeList nl = baseElement.getElementsByTagName(tagName); // if there are more then one decriptions we will use only the first one if (nl.getLength() > 0) { Node n = nl.item(0); NodeList childNodes = n.getChildNodes(); if (childNodes.getLength() > 0) { Node cdata = childNodes.item(0); value = UtilXml.checkEmpty(cdata.getNodeValue()); } } return value; } private void createNotification(Element baseElement, ModelService model) { List<? extends Element> n = UtilXml.childElementList(baseElement, "notification"); // default notification groups ModelNotification nSuccess = new ModelNotification(); nSuccess.notificationEvent = "success"; nSuccess.notificationGroupName = "default.success." + model.fromLoader; model.notifications.add(nSuccess); ModelNotification nFail = new ModelNotification(); nFail.notificationEvent = "fail"; nFail.notificationGroupName = "default.fail." + model.fromLoader; model.notifications.add(nFail); ModelNotification nError = new ModelNotification(); nError.notificationEvent = "error"; nError.notificationGroupName = "default.error." 
+ model.fromLoader; model.notifications.add(nError); if (n != null) { for (Element e: n) { ModelNotification notify = new ModelNotification(); notify.notificationEvent = e.getAttribute("event"); notify.notificationGroupName = e.getAttribute("group"); model.notifications.add(notify); } } } private void createPermission(Element baseElement, ModelService model) { Element e = UtilXml.firstChildElement(baseElement, "permission-service"); if (e != null) { model.permissionServiceName = e.getAttribute("service-name"); model.permissionMainAction = e.getAttribute("main-action"); model.permissionResourceDesc = e.getAttribute("resource-description"); model.auth = true; // auth is always required when permissions are set } } private void createPermGroups(Element baseElement, ModelService model) { for (Element element: UtilXml.childElementList(baseElement, "required-permissions")) { ModelPermGroup group = new ModelPermGroup(); group.joinType = element.getAttribute("join-type"); createGroupPermissions(element, group, model); model.permissionGroups.add(group); } } private void createGroupPermissions(Element baseElement, ModelPermGroup group, ModelService service) { // create the simple permissions for (Element element: UtilXml.childElementList(baseElement, "check-permission")) { ModelPermission perm = new ModelPermission(); perm.nameOrRole = element.getAttribute("permission").intern(); perm.action = element.getAttribute("action").intern(); if (UtilValidate.isNotEmpty(perm.action)) { perm.permissionType = ModelPermission.ENTITY_PERMISSION; } else { perm.permissionType = ModelPermission.PERMISSION; } perm.serviceModel = service; group.permissions.add(perm); } // create the role member permissions for (Element element: UtilXml.childElementList(baseElement, "check-role-member")) { ModelPermission perm = new ModelPermission(); perm.permissionType = ModelPermission.ROLE_MEMBER; perm.nameOrRole = element.getAttribute("role-type").intern(); perm.serviceModel = service; 
group.permissions.add(perm); } // Create the permissions based on permission services for (Element element : UtilXml.childElementList(baseElement, "permission-service")) { ModelPermission perm = new ModelPermission(); if (baseElement != null) { perm.permissionType = ModelPermission.PERMISSION_SERVICE; perm.permissionServiceName = element.getAttribute("service-name"); perm.action = element.getAttribute("main-action"); perm.permissionResourceDesc = element.getAttribute("resource-description"); perm.auth = true; // auth is always required when permissions are set perm.serviceModel = service; group.permissions.add(perm); } } } private void createGroupDefs(Element baseElement, ModelService service) { List<? extends Element> group = UtilXml.childElementList(baseElement, "group"); if (UtilValidate.isNotEmpty(group)) { Element groupElement = group.get(0); groupElement.setAttribute("name", "_" + service.name + ".group"); service.internalGroup = new GroupModel(groupElement); service.invoke = service.internalGroup.getGroupName(); if (Debug.verboseOn()) Debug.logVerbose("Created INTERNAL GROUP model [" + service.internalGroup + "]", module); } } private void createImplDefs(Element baseElement, ModelService service) { for (Element implement: UtilXml.childElementList(baseElement, "implements")) { String serviceName = UtilXml.checkEmpty(implement.getAttribute("service")).intern(); boolean optional = UtilXml.checkBoolean(implement.getAttribute("optional"), false); if (serviceName.length() > 0) service.implServices.add(new ModelServiceIface(serviceName, optional)); //service.implServices.add(serviceName); } } private void createAutoAttrDefs(Element baseElement, ModelService service) { for (Element element: UtilXml.childElementList(baseElement, "auto-attributes")) { createAutoAttrDef(element, service); } } private void createAutoAttrDef(Element autoElement, ModelService service) { // get the entity name; first from the auto-attributes then from the service def String entityName = 
UtilXml.checkEmpty(autoElement.getAttribute("entity-name")); if (UtilValidate.isEmpty(entityName)) { entityName = service.defaultEntityName; if (UtilValidate.isEmpty(entityName)) { Debug.logWarning("Auto-Attribute does not specify an entity-name; not default-entity on service definition", module); } } // get the include type 'pk|nonpk|all' String includeType = UtilXml.checkEmpty(autoElement.getAttribute("include")); boolean includePk = "pk".equals(includeType) || "all".equals(includeType); boolean includeNonPk = "nonpk".equals(includeType) || "all".equals(includeType); if (delegator == null) { Debug.logWarning("Cannot use auto-attribute fields with a null delegator", module); } if (delegator != null && entityName != null) { Map<String, ModelParam> modelParamMap = new LinkedHashMap<String, ModelParam>(); try { ModelEntity entity = delegator.getModelEntity(entityName); if (entity == null) { throw new GeneralException("Could not find entity with name [" + entityName + "]"); } Iterator<ModelField> fieldsIter = entity.getFieldsIterator(); if (fieldsIter != null) { while (fieldsIter.hasNext()) { ModelField field = fieldsIter.next(); if ((!field.getIsAutoCreatedInternal()) && ((field.getIsPk() && includePk) || (!field.getIsPk() && includeNonPk))) { ModelFieldType fieldType = delegator.getEntityFieldType(entity, field.getType()); if (fieldType == null) { throw new GeneralException("Null field type from delegator for entity [" + entityName + "]"); } ModelParam param = new ModelParam(); param.entityName = entityName; param.fieldName = field.getName(); param.name = field.getName(); param.type = fieldType.getJavaType(); // this is a special case where we use something different in the service layer than we do in the entity/data layer if ("java.sql.Blob".equals(param.type)) { param.type = "java.nio.ByteBuffer"; } param.mode = UtilXml.checkEmpty(autoElement.getAttribute("mode")).intern(); param.optional = "true".equalsIgnoreCase(autoElement.getAttribute("optional")); // default 
to true param.formDisplay = !"false".equalsIgnoreCase(autoElement.getAttribute("form-display")); // default to false param.allowHtml = UtilXml.checkEmpty(autoElement.getAttribute("allow-html"), "none").intern(); // default to none modelParamMap.put(field.getName(), param); } } // get the excludes list; and remove those from the map List<? extends Element> excludes = UtilXml.childElementList(autoElement, "exclude"); if (excludes != null) { for (Element exclude: excludes) { modelParamMap.remove(UtilXml.checkEmpty(exclude.getAttribute("field-name"))); } } // now add in all the remaining params for (ModelParam thisParam: modelParamMap.values()) { //Debug.logInfo("Adding Param to " + service.name + ": " + thisParam.name + " [" + thisParam.mode + "] " + thisParam.type + " (" + thisParam.optional + ")", module); service.addParam(thisParam); } } } catch (GenericEntityException e) { Debug.logError(e, "Problem loading auto-attributes [" + entityName + "] for " + service.name, module); } catch (GeneralException e) { Debug.logError(e, "Cannot load auto-attributes : " + e.getMessage() + " for " + service.name, module); } } } private void createAttrDefs(Element baseElement, ModelService service) { // Add in the defined attributes (override the above defaults if specified) for (Element attribute: UtilXml.childElementList(baseElement, "attribute")) { ModelParam param = new ModelParam(); param.name = UtilXml.checkEmpty(attribute.getAttribute("name")).intern(); param.description = getCDATADef(attribute, "description"); param.type = UtilXml.checkEmpty(attribute.getAttribute("type")).intern(); param.mode = UtilXml.checkEmpty(attribute.getAttribute("mode")).intern(); param.entityName = UtilXml.checkEmpty(attribute.getAttribute("entity-name")).intern(); param.fieldName = UtilXml.checkEmpty(attribute.getAttribute("field-name")).intern(); param.requestAttributeName = UtilXml.checkEmpty(attribute.getAttribute("request-attribute-name")).intern(); param.sessionAttributeName = 
UtilXml.checkEmpty(attribute.getAttribute("session-attribute-name")).intern(); param.stringMapPrefix = UtilXml.checkEmpty(attribute.getAttribute("string-map-prefix")).intern(); param.stringListSuffix = UtilXml.checkEmpty(attribute.getAttribute("string-list-suffix")).intern(); param.formLabel = attribute.hasAttribute("form-label")?attribute.getAttribute("form-label").intern():null; param.optional = "true".equalsIgnoreCase(attribute.getAttribute("optional")); // default to true param.formDisplay = !"false".equalsIgnoreCase(attribute.getAttribute("form-display")); // default to false param.allowHtml = UtilXml.checkEmpty(attribute.getAttribute("allow-html"), "none").intern(); // default to none // default value String defValue = attribute.getAttribute("default-value"); if (UtilValidate.isNotEmpty(defValue)) { if (Debug.verboseOn()) Debug.logVerbose("Got a default-value [" + defValue + "] for service attribute [" + service.name + "." + param.name + "]", module); param.setDefaultValue(defValue.intern()); } // set the entity name to the default if not specified if (param.entityName.length() == 0) { param.entityName = service.defaultEntityName; } // set the field-name to the name if entity name is specified but no field-name if (param.fieldName.length() == 0 && param.entityName.length() > 0) { param.fieldName = param.name; } // set the validators this.addValidators(attribute, param); service.addParam(param); } // Add the default optional parameters ModelParam def; // responseMessage def = new ModelParam(); def.name = ModelService.RESPONSE_MESSAGE; def.type = "String"; def.mode = "OUT"; def.optional = true; def.internal = true; service.addParam(def); // errorMessage def = new ModelParam(); def.name = ModelService.ERROR_MESSAGE; def.type = "String"; def.mode = "OUT"; def.optional = true; def.internal = true; service.addParam(def); // errorMessageList def = new ModelParam(); def.name = ModelService.ERROR_MESSAGE_LIST; def.type = "java.util.List"; def.mode = "OUT"; 
def.optional = true; def.internal = true; service.addParam(def); // successMessage def = new ModelParam(); def.name = ModelService.SUCCESS_MESSAGE; def.type = "String"; def.mode = "OUT"; def.optional = true; def.internal = true; service.addParam(def); // successMessageList def = new ModelParam(); def.name = ModelService.SUCCESS_MESSAGE_LIST; def.type = "java.util.List"; def.mode = "OUT"; def.optional = true; def.internal = true; service.addParam(def); // userLogin def = new ModelParam(); def.name = "userLogin"; def.type = "org.apache.ofbiz.entity.GenericValue"; def.mode = "INOUT"; def.optional = true; def.internal = true; service.addParam(def); // login.username def = new ModelParam(); def.name = "login.username"; def.type = "String"; def.mode = "IN"; def.optional = true; def.internal = true; service.addParam(def); // login.password def = new ModelParam(); def.name = "login.password"; def.type = "String"; def.mode = "IN"; def.optional = true; def.internal = true; service.addParam(def); // Locale def = new ModelParam(); def.name = "locale"; def.type = "java.util.Locale"; def.mode = "INOUT"; def.optional = true; def.internal = true; service.addParam(def); // timeZone def = new ModelParam(); def.name = "timeZone"; def.type = "java.util.TimeZone"; def.mode = "INOUT"; def.optional = true; def.internal = true; service.addParam(def); } private void createOverrideDefs(Element baseElement, ModelService service) { for (Element overrideElement: UtilXml.childElementList(baseElement, "override")) { String name = UtilXml.checkEmpty(overrideElement.getAttribute("name")); ModelParam param = service.getParam(name); boolean directToParams = true; if (param == null) { if (!service.inheritedParameters && (service.implServices.size() > 0 || "group".equals(service.engineName))) { // create a temp def to place in the ModelService // this will get read when we read implemented services directToParams = false; param = new ModelParam(); param.name = name; } else { Debug.logWarning("No 
parameter found for override parameter named: " + name + " in service " + service.name, module); } } if (param != null) { // set only modified values if (UtilValidate.isNotEmpty(overrideElement.getAttribute("type"))) { param.type = UtilXml.checkEmpty(overrideElement.getAttribute("type")).intern(); } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("mode"))) { param.mode = UtilXml.checkEmpty(overrideElement.getAttribute("mode")).intern(); } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("entity-name"))) { param.entityName = UtilXml.checkEmpty(overrideElement.getAttribute("entity-name")).intern(); } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("field-name"))) { param.fieldName = UtilXml.checkEmpty(overrideElement.getAttribute("field-name")).intern(); } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("form-label"))) { param.formLabel = UtilXml.checkEmpty(overrideElement.getAttribute("form-label")).intern(); } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("optional"))) { param.optional = "true".equalsIgnoreCase(overrideElement.getAttribute("optional")); // default to true param.overrideOptional = true; } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("form-display"))) { param.formDisplay = !"false".equalsIgnoreCase(overrideElement.getAttribute("form-display")); // default to false param.overrideFormDisplay = true; } if (UtilValidate.isNotEmpty(overrideElement.getAttribute("allow-html"))) { param.allowHtml = UtilXml.checkEmpty(overrideElement.getAttribute("allow-html")).intern(); } // default value String defValue = overrideElement.getAttribute("default-value"); if (UtilValidate.isNotEmpty(defValue)) { param.setDefaultValue(defValue); } // override validators this.addValidators(overrideElement, param); if (directToParams) { service.addParam(param); } else { service.overrideParameters.add(param); } } } } private void addValidators(Element attribute, ModelParam param) { List<? 
extends Element> validateElements = UtilXml.childElementList(attribute, "type-validate"); if (UtilValidate.isNotEmpty(validateElements)) { // always clear out old ones; never append param.validators = new LinkedList<ModelParamValidator>(); Element validate = validateElements.get(0); String methodName = validate.getAttribute("method").intern(); String className = validate.getAttribute("class").intern(); Element fail = UtilXml.firstChildElement(validate, "fail-message"); if (fail != null) { String message = fail.getAttribute("message").intern(); param.addValidator(className, methodName, message); } else { fail = UtilXml.firstChildElement(validate, "fail-property"); if (fail != null) { String resource = fail.getAttribute("resource").intern(); String property = fail.getAttribute("property").intern(); param.addValidator(className, methodName, resource, property); } } } } private Document getDocument(URL url) { if (url == null) return null; Document document = null; try { document = UtilXml.readXmlDocument(url, true, true); } catch (SAXException sxe) { // Error generated during parsing) Exception x = sxe; if (sxe.getException() != null) x = sxe.getException(); x.printStackTrace(); } catch (ParserConfigurationException pce) { // Parser with specified options can't be built pce.printStackTrace(); } catch (IOException ioe) { ioe.printStackTrace(); } return document; } }
apache/ozone
35,324
hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestContainerReconciliationWithMockDatanodes.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.container.keyvalue; import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS; import static org.apache.hadoop.hdds.HddsUtils.checksumToString; import static org.apache.hadoop.hdds.protocol.MockDatanodeDetails.randomDatanodeDetails; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_DATANODE_DIR_KEY; import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.verifyAllDataChecksumsMatch; import static org.apache.hadoop.ozone.container.common.ContainerTestUtils.WRITE_STAGE; import static org.apache.hadoop.ozone.container.common.ContainerTestUtils.createDbInstancesForTestIfNeeded; import static org.apache.hadoop.ozone.container.common.impl.ContainerImplTestUtils.newContainerSet; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static 
org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Random; import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.commons.text.RandomStringGenerator; import org.apache.hadoop.hdds.client.BlockID; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos; import org.apache.hadoop.hdds.scm.XceiverClientSpi; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls; import org.apache.hadoop.hdds.utils.db.BatchOperation; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.common.Checksum; import org.apache.hadoop.ozone.common.ChecksumData; import org.apache.hadoop.ozone.container.checksum.ContainerChecksumTreeManager; import org.apache.hadoop.ozone.container.checksum.DNContainerOperationClient; import org.apache.hadoop.ozone.container.common.ContainerTestUtils; import org.apache.hadoop.ozone.container.common.helpers.BlockData; import 
org.apache.hadoop.ozone.container.common.helpers.ChunkInfo; import org.apache.hadoop.ozone.container.common.impl.ContainerSet; import org.apache.hadoop.ozone.container.common.interfaces.DBHandle; import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet; import org.apache.hadoop.ozone.container.common.volume.StorageVolume; import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils; import org.apache.hadoop.ozone.container.ozoneimpl.ContainerController; import org.apache.hadoop.ozone.container.ozoneimpl.ContainerScannerConfiguration; import org.apache.hadoop.ozone.container.ozoneimpl.OnDemandContainerScanner; import org.apache.ozone.test.GenericTestUtils; import org.apache.ratis.thirdparty.com.google.protobuf.ByteString; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This unit test simulates three datanodes with replicas of a container that need to be reconciled. * It creates three KeyValueHandler instances to represent each datanode, and each instance is working on a container * replica that is stored in a local directory. The reconciliation client is mocked to return the corresponding local * container for each datanode peer. */ public class TestContainerReconciliationWithMockDatanodes { public static final Logger LOG = LoggerFactory.getLogger(TestContainerReconciliationWithMockDatanodes.class); // All container replicas will be placed in this directory, and the same replicas will be re-used for each test run. 
@TempDir
private static Path containerDir;
// Shared fixtures: built once in setup() and reused by every test. Each test is expected to fully
// repair the container so the next test starts from a healthy state.
private static DNContainerOperationClient dnClient;
private static MockedStatic<ContainerProtocolCalls> containerProtocolMock;
private static List<MockDatanode> datanodes;
// Data checksum of the pristine container; every reconciliation test asserts convergence back to this value.
private static long healthyDataChecksum;

private static final String CLUSTER_ID = UUID.randomUUID().toString();
private static final long CONTAINER_ID = 100L;
private static final int CHUNK_LEN = 3 * (int) OzoneConsts.KB;
private static final int CHUNKS_PER_BLOCK = 4;
private static final int NUM_DATANODES = 3;
private static final String TEST_SCAN = "Test Scan";

/**
 * Number of corrupt blocks and chunks.
 *
 * TODO HDDS-11942 support more combinations of corruptions.
 */
public static Stream<Arguments> corruptionValues() {
  return Stream.of(
      Arguments.of(5, 0),
      Arguments.of(0, 5),
      Arguments.of(0, 10),
      Arguments.of(10, 0),
      Arguments.of(5, 10),
      Arguments.of(10, 5),
      Arguments.of(2, 3),
      Arguments.of(3, 2),
      Arguments.of(4, 6),
      Arguments.of(6, 4),
      Arguments.of(6, 9),
      Arguments.of(9, 6)
  );
}

/**
 * Use the same container instances throughout the tests. Each reconciliation should make a full repair, resetting
 * the state for the next test.
 */
@BeforeAll
public static void setup() throws Exception {
  LOG.info("Data written to {}", containerDir);
  dnClient = new DNContainerOperationClient(new OzoneConfiguration(), null, null);
  datanodes = new ArrayList<>();
  // Create a container with 15 blocks and 3 replicas.
  for (int i = 0; i < NUM_DATANODES; i++) {
    DatanodeDetails dnDetails = randomDatanodeDetails();
    // Use this fake host name to track the node through the test since it's easier to visualize than a UUID.
    dnDetails.setHostName("dn" + (i + 1));
    MockDatanode dn = new MockDatanode(dnDetails, containerDir);
    // This will close the container and build a data checksum based on the chunk checksums in the metadata.
    dn.addContainerWithBlocks(CONTAINER_ID, 15);
    datanodes.add(dn);
  }
  // All replicas were written identically, so exactly one distinct checksum is expected.
  long dataChecksumFromMetadata = assertUniqueChecksumCount(CONTAINER_ID, datanodes, 1);
  assertNotEquals(0, dataChecksumFromMetadata);
  // A synchronous scan rebuilds each replica's merkle tree from the on-disk data; its checksum
  // must agree with the one derived from metadata at close time.
  datanodes.forEach(d -> d.scanContainer(CONTAINER_ID));
  healthyDataChecksum = assertUniqueChecksumCount(CONTAINER_ID, datanodes, 1);
  assertEquals(dataChecksumFromMetadata, healthyDataChecksum);
  // Do not count the initial synchronous scan to build the merkle tree towards the scan count in the tests.
  // This lets each test run start counting the number of scans from zero.
  datanodes.forEach(MockDatanode::resetOnDemandScanCount);
  containerProtocolMock = Mockito.mockStatic(ContainerProtocolCalls.class);
  mockContainerProtocolCalls();
}

/** Zero the per-datanode on-demand scan counters so each test counts scans from zero. */
@AfterEach
public void reset() {
  datanodes.forEach(MockDatanode::resetOnDemandScanCount);
}

/** Release the static Mockito stub of {@link ContainerProtocolCalls}. */
@AfterAll
public static void teardown() {
  if (containerProtocolMock != null) {
    containerProtocolMock.close();
  }
}

/**
 * Corrupts two of the three replicas (one in forward order, one in reverse so they differ from each
 * other), then reconciles every node with its peers and asserts all replicas converge back to the
 * healthy checksum.
 */
@ParameterizedTest
@MethodSource("corruptionValues")
public void testContainerReconciliation(int numBlocksToRemove, int numChunksToCorrupt) throws Exception {
  LOG.info("Healthy data checksum for container {} in this test is {}", CONTAINER_ID,
      checksumToString(healthyDataChecksum));
  // Introduce corruption in each container on different replicas.
  List<MockDatanode> dnsToCorrupt = datanodes.stream().limit(2).collect(Collectors.toList());
  dnsToCorrupt.get(0).introduceCorruption(CONTAINER_ID, numBlocksToRemove, numChunksToCorrupt, false);
  dnsToCorrupt.get(1).introduceCorruption(CONTAINER_ID, numBlocksToRemove, numChunksToCorrupt, true);
  // Use synchronous on-demand scans to re-build the merkle trees after corruption.
  datanodes.forEach(d -> d.scanContainer(CONTAINER_ID));
  // Without reconciliation, checksums should be different because of the corruption.
  assertUniqueChecksumCount(CONTAINER_ID, datanodes, 3);
  // Each datanode should have had one on-demand scan during test setup, and a second one after corruption was
  // introduced.
  waitForExpectedScanCount(1);

  // Reconcile each datanode with its peers.
  // In a real cluster, SCM will not send a command to reconcile a datanode with itself.
  for (MockDatanode current : datanodes) {
    List<DatanodeDetails> peers = datanodes.stream()
        .map(MockDatanode::getDnDetails)
        .filter(other -> !current.getDnDetails().equals(other))
        .collect(Collectors.toList());
    current.reconcileContainerSuccess(dnClient, peers, CONTAINER_ID);
  }
  // Reconciliation should have triggered a second on-demand scan for each replica. Wait for them to finish before
  // checking the results.
  waitForExpectedScanCount(2);
  // After reconciliation, checksums should be the same for all containers.
  long repairedDataChecksum = assertUniqueChecksumCount(CONTAINER_ID, datanodes, 1);
  assertEquals(healthyDataChecksum, repairedDataChecksum);
}

/**
 * Enum to represent different failure modes for container protocol calls.
 */
public enum FailureLocation {
  GET_CONTAINER_CHECKSUM_INFO("getContainerChecksumInfo"),
  GET_BLOCK("getBlock"),
  READ_CHUNK("readChunk");

  private final String methodName;

  FailureLocation(String methodName) {
    this.methodName = methodName;
  }

  public String getMethodName() {
    return methodName;
  }
}

/**
 * Provides test parameters for different failure modes.
 */
public static Stream<Arguments> failureLocations() {
  return Stream.of(
      Arguments.of(FailureLocation.GET_CONTAINER_CHECKSUM_INFO),
      Arguments.of(FailureLocation.GET_BLOCK),
      Arguments.of(FailureLocation.READ_CHUNK)
  );
}

/**
 * Verifies reconciliation still repairs a corrupted replica when one of its two peers fails at the
 * given protocol call — the remaining healthy peer must be enough to restore the healthy checksum.
 * The mock is restored to non-failing behavior at the end so later tests are unaffected.
 */
@ParameterizedTest
@MethodSource("failureLocations")
public void testContainerReconciliationWithPeerFailure(FailureLocation failureLocation) throws Exception {
  LOG.info("Testing container reconciliation with peer failure in {} for container {}",
      failureLocation.getMethodName(), CONTAINER_ID);
  // Introduce corruption in the first datanode
  MockDatanode corruptedNode = datanodes.get(0);
  MockDatanode healthyNode1 = datanodes.get(1);
  MockDatanode healthyNode2 = datanodes.get(2);
  corruptedNode.introduceCorruption(CONTAINER_ID, 1, 1, false);
  // Use synchronous on-demand scans to re-build the merkle trees after corruption.
  datanodes.forEach(d -> d.scanContainer(CONTAINER_ID));
  // Without reconciliation, checksums should be different.
  assertUniqueChecksumCount(CONTAINER_ID, datanodes, 2);
  waitForExpectedScanCount(1);
  // Create a failing peer - we'll make the second datanode fail during the specified operation
  DatanodeDetails failingPeerDetails = healthyNode1.getDnDetails();
  // Mock the failure for the specific method based on the failure mode
  mockContainerProtocolCalls(failureLocation, failingPeerDetails);
  // Now reconcile the corrupted node with its peers (including the failing one)
  List<DatanodeDetails> peers = Arrays.asList(failingPeerDetails, healthyNode2.getDnDetails());
  corruptedNode.reconcileContainer(dnClient, peers, CONTAINER_ID);
  // Wait for scan to complete - but this time we only expect the corrupted node to have a scan
  // triggered by reconciliation, so we wait specifically for that one
  try {
    GenericTestUtils.waitFor(() -> corruptedNode.getOnDemandScanCount() == 2, 100, 5_000);
  } catch (TimeoutException ex) {
    // Best-effort wait: the checksum assertions below are the real pass/fail criteria.
    LOG.warn("Timed out waiting for on-demand scan after reconciliation. Current count: {}",
        corruptedNode.getOnDemandScanCount());
  }
  // The corrupted node should still be repaired because it was able to reconcile with the healthy peer
  // even though one peer failed
  long repairedDataChecksum = assertUniqueChecksumCount(CONTAINER_ID, datanodes, 1);
  assertEquals(healthyDataChecksum, repairedDataChecksum);
  // Restore the original mock behavior for other tests
  mockContainerProtocolCalls();
}

/**
 * Verifies that when the local checksum file cannot be read, reconciliation surfaces the
 * IOException to the caller but still triggers an on-demand scan of the container.
 */
@Test
public void testContainerReconciliationFailureContainerScan() throws Exception {
  // Use synchronous on-demand scans to re-build the merkle trees after corruption.
  datanodes.forEach(d -> d.scanContainer(CONTAINER_ID));
  // Each datanode should have had one on-demand scan during test setup, and a second one after corruption was
  // introduced.
  waitForExpectedScanCount(1);
  for (MockDatanode current : datanodes) {
    // The checksum manager is a Mockito spy (see MockDatanode's constructor), so read() can be stubbed
    // to fail and then reset afterwards.
    doThrow(IOException.class).when(current.getHandler().getChecksumManager()).read(any());
    List<DatanodeDetails> peers = datanodes.stream()
        .map(MockDatanode::getDnDetails)
        .filter(other -> !current.getDnDetails().equals(other))
        .collect(Collectors.toList());
    // Reconciliation should fail for each datanode, since the checksum info cannot be retrieved.
    assertThrows(IOException.class, () -> current.reconcileContainer(dnClient, peers, CONTAINER_ID));
    Mockito.reset(current.getHandler().getChecksumManager());
  }
  // Even failure of Reconciliation should have triggered a second on-demand scan for each replica.
  waitForExpectedScanCount(2);
}

/**
 * Uses the on-demand container scanner metrics to wait for the expected number of on-demand scans to complete on
 * every datanode.
 */
private void waitForExpectedScanCount(int expectedCountPerDatanode) throws Exception {
  for (MockDatanode datanode: datanodes) {
    try {
      GenericTestUtils.waitFor(() -> datanode.getOnDemandScanCount() == expectedCountPerDatanode, 100, 10_000);
    } catch (TimeoutException ex) {
      LOG.error("Timed out waiting for on-demand scan count {} to reach expected count {} on datanode {}",
          datanode.getOnDemandScanCount(), expectedCountPerDatanode, datanode);
      throw ex;
    }
  }
}

/**
 * Checks for the expected number of unique checksums among a container on the provided datanodes.
 * @return The data checksum from one of the nodes. Useful if expectedUniqueChecksums = 1.
 */
private static long assertUniqueChecksumCount(long containerID, Collection<MockDatanode> nodes,
    long expectedUniqueChecksums) {
  long actualUniqueChecksums = nodes.stream()
      .mapToLong(d -> d.checkAndGetDataChecksum(containerID))
      .distinct()
      .count();
  assertEquals(expectedUniqueChecksums, actualUniqueChecksums);
  return nodes.stream().findAny().get().checkAndGetDataChecksum(containerID);
}

/** Installs the default (non-failing) static stubs for {@link ContainerProtocolCalls}. */
private static void mockContainerProtocolCalls() {
  // Mock network calls without injecting failures.
  mockContainerProtocolCalls(null, null);
}

/**
 * Routes the client-side static calls in {@link ContainerProtocolCalls} to the MockDatanode that owns
 * the single-node pipeline of the XceiverClient, optionally throwing an IOException when the target
 * node matches {@code failingPeerDetails} at the chosen {@code failureLocation}.
 *
 * @param failureLocation which call should fail, or null for no injected failures
 * @param failingPeerDetails the peer whose calls should fail, or null for no injected failures
 */
private static void mockContainerProtocolCalls(FailureLocation failureLocation,
    DatanodeDetails failingPeerDetails) {
  Map<DatanodeDetails, MockDatanode> dnMap = datanodes.stream()
      .collect(Collectors.toMap(MockDatanode::getDnDetails, Function.identity()));
  // Mock getContainerChecksumInfo
  containerProtocolMock.when(() -> ContainerProtocolCalls.getContainerChecksumInfo(any(), anyLong(), any()))
      .thenAnswer(inv -> {
        XceiverClientSpi xceiverClientSpi = inv.getArgument(0);
        long containerID = inv.getArgument(1);
        Pipeline pipeline = xceiverClientSpi.getPipeline();
        assertEquals(1, pipeline.size());
        DatanodeDetails dn = pipeline.getFirstNode();
        if (failureLocation == FailureLocation.GET_CONTAINER_CHECKSUM_INFO && dn.equals(failingPeerDetails)) {
          throw new IOException("Simulated peer failure for testing in getContainerChecksumInfo");
        }
        return dnMap.get(dn).getChecksumInfo(containerID);
      });
  // Mock getBlock
  containerProtocolMock.when(() -> ContainerProtocolCalls.getBlock(any(), any(), any(), any(), anyMap()))
      .thenAnswer(inv -> {
        XceiverClientSpi xceiverClientSpi = inv.getArgument(0);
        BlockID blockID = inv.getArgument(2);
        Pipeline pipeline = xceiverClientSpi.getPipeline();
        assertEquals(1, pipeline.size());
        DatanodeDetails dn = pipeline.getFirstNode();
        if (failureLocation == FailureLocation.GET_BLOCK && dn.equals(failingPeerDetails)) {
          throw new IOException("Simulated peer failure for testing in getBlock");
        }
        return dnMap.get(dn).getBlock(blockID);
      });
  // Mock readChunk
  containerProtocolMock.when(() -> ContainerProtocolCalls.readChunk(any(), any(), any(), any(), any()))
      .thenAnswer(inv -> {
        XceiverClientSpi xceiverClientSpi = inv.getArgument(0);
        ContainerProtos.ChunkInfo chunkInfo = inv.getArgument(1);
        ContainerProtos.DatanodeBlockID blockId = inv.getArgument(2);
        List<XceiverClientSpi.Validator> checksumValidators = inv.getArgument(3);
        Pipeline pipeline = xceiverClientSpi.getPipeline();
        assertEquals(1, pipeline.size());
        DatanodeDetails dn = pipeline.getFirstNode();
        if (failureLocation == FailureLocation.READ_CHUNK && dn.equals(failingPeerDetails)) {
          throw new IOException("Simulated peer failure for testing in readChunk");
        }
        return dnMap.get(dn).readChunk(blockId, chunkInfo, checksumValidators);
      });
  containerProtocolMock.when(() -> ContainerProtocolCalls.toValidatorList(any())).thenCallRealMethod();
}

/**
 * This class wraps a KeyValueHandler instance with just enough features to test its reconciliation functionality.
 */
private static class MockDatanode {
  private final KeyValueHandler handler;
  private final DatanodeDetails dnDetails;
  private final OnDemandContainerScanner onDemandScanner;
  private final ContainerSet containerSet;
  private final OzoneConfiguration conf;
  private final Logger log;

  MockDatanode(DatanodeDetails dnDetails, Path tempDir) throws IOException {
    this.dnDetails = dnDetails;
    // Per-node logger named after the fake host name so interleaved test logs are attributable.
    log = LoggerFactory.getLogger("mock-datanode-" + dnDetails.getHostName());

    // Each mock datanode gets its own data and metadata directories under the shared temp dir.
    Path dataVolume = Paths.get(tempDir.toString(), dnDetails.getHostName(), "data");
    Path metadataVolume = Paths.get(tempDir.toString(), dnDetails.getHostName(), "metadata");

    this.conf = new OzoneConfiguration();
    conf.set(HDDS_DATANODE_DIR_KEY, dataVolume.toString());
    conf.set(OZONE_METADATA_DIRS, metadataVolume.toString());

    containerSet = newContainerSet();
    MutableVolumeSet volumeSet = createVolumeSet();
    // The checksum tree manager is a spy so tests can inject read failures (see
    // testContainerReconciliationFailureContainerScan).
    handler = ContainerTestUtils.getKeyValueHandler(conf, dnDetails.getUuidString(), containerSet, volumeSet,
        spy(new ContainerChecksumTreeManager(conf)));
    handler.setClusterID(CLUSTER_ID);

    ContainerController controller = new ContainerController(containerSet,
        Collections.singletonMap(ContainerProtos.ContainerType.KeyValueContainer, handler));
    onDemandScanner = new OnDemandContainerScanner(
        conf.getObject(ContainerScannerConfiguration.class), controller);
    // Register the on-demand container scanner with the container set used by the KeyValueHandler.
    containerSet.registerOnDemandScanner(onDemandScanner);
  }

  public DatanodeDetails getDnDetails() {
    return dnDetails;
  }

  public KeyValueHandler getHandler() {
    return handler;
  }

  /**
   * Builds the response a peer would return for a checksum-info request against this node.
   *
   * @throws IOException for general IO errors accessing the checksum file
   * @throws java.io.FileNotFoundException When the checksum file does not exist.
   */
  public ContainerProtos.GetContainerChecksumInfoResponseProto getChecksumInfo(long containerID)
      throws IOException {
    KeyValueContainer container = getContainer(containerID);
    ByteString checksumInfo = handler.getChecksumManager().getContainerChecksumInfo(container.getContainerData());
    return ContainerProtos.GetContainerChecksumInfoResponseProto.newBuilder()
        .setContainerID(containerID)
        .setContainerChecksumInfo(checksumInfo)
        .build();
  }

  /**
   * Verifies that the data checksum on disk matches the one in memory, and returns the data checksum.
   */
  public long checkAndGetDataChecksum(long containerID) {
    KeyValueContainer container = getContainer(containerID);
    KeyValueContainerData containerData = container.getContainerData();
    long dataChecksum = 0;
    try {
      ContainerProtos.ContainerChecksumInfo containerChecksumInfo = handler.getChecksumManager()
          .read(containerData);
      dataChecksum = containerChecksumInfo.getContainerMerkleTree().getDataChecksum();
      verifyAllDataChecksumsMatch(containerData, conf);
    } catch (IOException ex) {
      fail("Failed to read container checksum from disk", ex);
    }
    log.info("Retrieved data checksum {} from container {}", checksumToString(dataChecksum), containerID);
    return dataChecksum;
  }

  /** Serves a peer getBlock request from this node's block manager. */
  public ContainerProtos.GetBlockResponseProto getBlock(BlockID blockID) throws IOException {
    KeyValueContainer container = getContainer(blockID.getContainerID());
    ContainerProtos.BlockData blockData =
        handler.getBlockManager().getBlock(container, blockID).getProtoBufMessage();
    return ContainerProtos.GetBlockResponseProto.newBuilder()
        .setBlockData(blockData)
        .build();
  }

  /**
   * Serves a peer readChunk request from this node's chunk manager, then runs the caller-supplied
   * checksum validators against the response before returning it.
   */
  public ContainerProtos.ReadChunkResponseProto readChunk(ContainerProtos.DatanodeBlockID blockId,
      ContainerProtos.ChunkInfo chunkInfo, List<XceiverClientSpi.Validator> validators) throws IOException {
    KeyValueContainer container = getContainer(blockId.getContainerID());
    ContainerProtos.ReadChunkResponseProto readChunkResponseProto =
        ContainerProtos.ReadChunkResponseProto.newBuilder()
            .setBlockID(blockId)
            .setChunkData(chunkInfo)
            .setData(handler.getChunkManager().readChunk(container, BlockID.getFromProtobuf(blockId),
                ChunkInfo.getFromProtoBuf(chunkInfo), null).toByteString())
            .build();
    verifyChecksums(readChunkResponseProto, blockId, chunkInfo, validators);
    return readChunkResponseProto;
  }

  /**
   * Wraps the read-chunk response in synthetic request/response command protos and feeds them through
   * every validator, mirroring the client-side checksum validation a real read would perform.
   */
  public void verifyChecksums(ContainerProtos.ReadChunkResponseProto readChunkResponseProto,
      ContainerProtos.DatanodeBlockID blockId, ContainerProtos.ChunkInfo chunkInfo,
      List<XceiverClientSpi.Validator> validators) throws IOException {
    assertFalse(validators.isEmpty());
    ContainerProtos.ContainerCommandRequestProto requestProto =
        ContainerProtos.ContainerCommandRequestProto.newBuilder()
            .setCmdType(ContainerProtos.Type.ReadChunk)
            .setContainerID(blockId.getContainerID())
            .setDatanodeUuid(dnDetails.getUuidString())
            .setReadChunk(
                ContainerProtos.ReadChunkRequestProto.newBuilder()
                    .setBlockID(blockId)
                    .setChunkData(chunkInfo)
                    .build())
            .build();
    ContainerProtos.ContainerCommandResponseProto responseProto =
        ContainerProtos.ContainerCommandResponseProto.newBuilder()
            .setCmdType(ContainerProtos.Type.ReadChunk)
            .setResult(ContainerProtos.Result.SUCCESS)
            .setReadChunk(readChunkResponseProto).build();
    for (XceiverClientSpi.Validator function : validators) {
      function.accept(requestProto, responseProto);
    }
  }

  public KeyValueContainer getContainer(long containerID) {
    return (KeyValueContainer) containerSet.getContainer(containerID);
  }

  /**
   * Triggers a synchronous scan of the container. This method will block until the scan completes.
   */
  public void scanContainer(long containerID) {
    Optional<Future<?>> scanFuture =
        onDemandScanner.scanContainerWithoutGap(containerSet.getContainer(containerID), TEST_SCAN);
    assertTrue(scanFuture.isPresent());
    try {
      scanFuture.get().get();
    } catch (InterruptedException | ExecutionException e) {
      fail("On demand container scan failed", e);
    }
  }

  /** Number of completed on-demand scans, from the scanner's metrics. */
  public int getOnDemandScanCount() {
    return onDemandScanner.getMetrics().getNumContainersScanned();
  }

  /** Reset the scan metric so tests can count scans from zero. */
  public void resetOnDemandScanCount() {
    onDemandScanner.getMetrics().resetNumContainersScanned();
  }

  /** Like {@link #reconcileContainer}, but converts an IOException into a test failure. */
  public void reconcileContainerSuccess(DNContainerOperationClient client, Collection<DatanodeDetails> peers,
      long containerID) {
    try {
      reconcileContainer(client, peers, containerID);
    } catch (IOException ex) {
      fail("Container reconciliation failed", ex);
    }
  }

  /** Reconciles this node's replica of the container against the given peers. */
  public void reconcileContainer(DNContainerOperationClient client, Collection<DatanodeDetails> peers,
      long containerID) throws IOException {
    log.info("Beginning reconciliation on this mock datanode");
    handler.reconcileContainer(client, containerSet.getContainer(containerID), peers);
  }

  /**
   * Create a container with the specified number of blocks. Block data is human-readable so the block files can be
   * inspected when debugging the test.
   */
  public void addContainerWithBlocks(long containerId, int blocks) throws Exception {
    ContainerProtos.CreateContainerRequestProto createRequest =
        ContainerProtos.CreateContainerRequestProto.newBuilder()
            .setContainerType(ContainerProtos.ContainerType.KeyValueContainer)
            .build();
    ContainerProtos.ContainerCommandRequestProto request =
        ContainerProtos.ContainerCommandRequestProto.newBuilder()
            .setCmdType(ContainerProtos.Type.CreateContainer)
            .setCreateContainer(createRequest)
            .setContainerID(containerId)
            .setDatanodeUuid(dnDetails.getUuidString())
            .build();
    handler.handleCreateContainer(request, null);

    KeyValueContainer container = getContainer(containerId);
    // Verify container is initially empty.
    File chunksPath = new File(container.getContainerData().getChunksPath());
    ContainerLayoutTestInfo.FILE_PER_BLOCK.validateFileCount(chunksPath, 0, 0);

    // Create data to put in the container.
    // Seed using the container ID so that all replicas are identical.
    RandomStringGenerator generator = new RandomStringGenerator.Builder()
        .withinRange('a', 'z')
        .usingRandom(new Random(containerId)::nextInt)
        .get();
    // This array will keep getting populated with new bytes for each chunk.
    byte[] chunkData = new byte[CHUNK_LEN];
    int bytesPerChecksum = 2 * (int) OzoneConsts.KB;

    // Add data to the container.
    List<ContainerProtos.ChunkInfo> chunkList = new ArrayList<>();
    for (int i = 0; i < blocks; i++) {
      BlockID blockID = new BlockID(containerId, i);
      BlockData blockData = new BlockData(blockID);
      chunkList.clear();
      for (long chunkCount = 0; chunkCount < CHUNKS_PER_BLOCK; chunkCount++) {
        String chunkName = "chunk" + chunkCount;
        long offset = chunkCount * chunkData.length;
        ChunkInfo info = new ChunkInfo(chunkName, offset, chunkData.length);
        // Generate data for the chunk and compute its checksum.
        // Data is generated as one ascii character per line, so block files are human-readable if further
        // debugging is needed.
        for (int c = 0; c < chunkData.length; c += 2) {
          chunkData[c] = (byte)generator.generate(1).charAt(0);
          chunkData[c + 1] = (byte)'\n';
        }
        Checksum checksum = new Checksum(ContainerProtos.ChecksumType.CRC32, bytesPerChecksum);
        ChecksumData checksumData = checksum.computeChecksum(chunkData);
        info.setChecksumData(checksumData);
        // Write chunk and checksum into the container.
        chunkList.add(info.getProtoBufMessage());
        handler.getChunkManager().writeChunk(container, blockID, info, ByteBuffer.wrap(chunkData), WRITE_STAGE);
      }
      handler.getChunkManager().finishWriteChunks(container, blockData);
      blockData.setChunks(chunkList);
      blockData.setBlockCommitSequenceId(i);
      handler.getBlockManager().putBlock(container, blockData);
    }
    ContainerLayoutTestInfo.FILE_PER_BLOCK.validateFileCount(chunksPath, blocks,
        (long) blocks * CHUNKS_PER_BLOCK);
    container.markContainerForClose();
    handler.closeContainer(container);
  }

  @Override
  public String toString() {
    return dnDetails.toString();
  }

  /**
   * Returns a list of all blocks in the container sorted numerically by blockID.
   * For example, the unsorted list would have the first blocks as 1, 10, 11...
   * The list returned by this method would have the first blocks as 1, 2, 3...
   */
  private List<BlockData> getSortedBlocks(KeyValueContainer container) throws IOException {
    List<BlockData> blockDataList = handler.getBlockManager().listBlock(container, -1, 100);
    blockDataList.sort(Comparator.comparingLong(BlockData::getLocalID));
    return blockDataList;
  }

  /**
   * Introduce corruption in the container.
   * 1. Delete blocks from the container.
   * 2. Corrupt chunks at an offset.
   * If revers is true, the blocks and chunks are deleted in reverse order.
   */
  public void introduceCorruption(long containerID, int numBlocksToRemove, int numChunksToCorrupt,
      boolean reverse) throws IOException {
    KeyValueContainer container = getContainer(containerID);
    KeyValueContainerData containerData = container.getContainerData();

    // Simulate missing blocks: delete both the block file on disk and its RocksDB metadata entry.
    try (DBHandle handle = BlockUtils.getDB(containerData, conf);
         BatchOperation batch = handle.getStore().getBatchHandler().initBatchOperation()) {
      List<BlockData> blockDataList = getSortedBlocks(container);
      int size = blockDataList.size();
      for (int i = 0; i < numBlocksToRemove; i++) {
        BlockData blockData = reverse ? blockDataList.get(size - 1 - i) : blockDataList.get(i);
        File blockFile = TestContainerCorruptions.getBlock(container, blockData.getBlockID().getLocalID());
        Assertions.assertTrue(blockFile.delete());
        handle.getStore().getBlockDataTable().deleteWithBatch(batch,
            containerData.getBlockKey(blockData.getLocalID()));
        log.info("Deleting block {} from container {}", blockData.getBlockID().getLocalID(), containerID);
      }
      handle.getStore().getBatchHandler().commitBatchOperation(batch);
      // Check that the correct number of blocks were deleted.
      blockDataList = getSortedBlocks(container);
      assertEquals(numBlocksToRemove, size - blockDataList.size());
    }

    // Corrupt chunks at an offset. Chunks are spread round-robin across the remaining blocks
    // (blockIndex cycles through blocks; chunkIndex advances once per full cycle).
    List<BlockData> blockDataList = getSortedBlocks(container);
    int size = blockDataList.size();
    for (int i = 0; i < numChunksToCorrupt; i++) {
      int blockIndex = reverse ? size - 1 - (i % size) : i % size;
      BlockData blockData = blockDataList.get(blockIndex);
      int chunkIndex = i / size;
      File blockFile = TestContainerCorruptions.getBlock(container, blockData.getBlockID().getLocalID());
      List<ContainerProtos.ChunkInfo> chunks = new ArrayList<>(blockData.getChunks());
      ContainerProtos.ChunkInfo chunkInfo = chunks.remove(chunkIndex);
      corruptFileAtOffset(blockFile, chunkInfo.getOffset(), chunkInfo.getLen());
      log.info("Corrupting block {} at offset {} in container {}",
          blockData.getBlockID().getLocalID(), chunkInfo.getOffset(), containerID);
    }
  }

  /** Builds this node's volume set from its configured data dir and creates per-volume DB instances. */
  private MutableVolumeSet createVolumeSet() throws IOException {
    MutableVolumeSet volumeSet = new MutableVolumeSet(dnDetails.getUuidString(), conf, null,
        StorageVolume.VolumeType.DATA_VOLUME, null);
    createDbInstancesForTestIfNeeded(volumeSet, CLUSTER_ID, CLUSTER_ID, conf);
    return volumeSet;
  }

  /**
   * Overwrite the file with random bytes at an offset within the given length.
   */
  private static void corruptFileAtOffset(File file, long offset, long chunkLength) {
    try {
      final int fileLength = (int) file.length();
      assertTrue(fileLength >= offset + chunkLength);
      final int chunkEnd = (int)(offset + chunkLength);

      Path path = file.toPath();
      final byte[] original = IOUtils.readFully(Files.newInputStream(path), fileLength);

      // Corrupt the last byte and middle bytes of the block. The scanner should log this as two errors.
      final byte[] corruptedBytes = Arrays.copyOf(original, fileLength);
      corruptedBytes[chunkEnd - 1] = (byte) (original[chunkEnd - 1] << 1);
      // NOTE(review): the "middle" index math looks off — `offset + (chunkLength - offset) / 2` mixes an
      // offset with a length, and the result is halved again when used as an index. Presumably the intent
      // was a byte near `offset + chunkLength / 2`; any flipped in-range byte still corrupts the file, but
      // confirm the second corruption is guaranteed to land inside [offset, chunkEnd) for all chunk offsets.
      final long chunkMid = offset + (chunkLength - offset) / 2;
      corruptedBytes[(int) (chunkMid / 2)] = (byte) (original[(int) (chunkMid / 2)] << 1);
      Files.write(path, corruptedBytes,
          StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.SYNC);
      // Sanity-check the write actually changed the file contents.
      assertThat(IOUtils.readFully(Files.newInputStream(path), fileLength))
          .isEqualTo(corruptedBytes)
          .isNotEqualTo(original);
    } catch (IOException ex) {
      // Fail the test.
      throw new UncheckedIOException(ex);
    }
  }
}
}
googleapis/google-cloud-java
35,095
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/BatchCreateTensorboardTimeSeriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/tensorboard_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message for * [TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardTimeSeries]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse} */ public final class BatchCreateTensorboardTimeSeriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) BatchCreateTensorboardTimeSeriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use BatchCreateTensorboardTimeSeriesResponse.newBuilder() to construct. 
private BatchCreateTensorboardTimeSeriesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchCreateTensorboardTimeSeriesResponse() { tensorboardTimeSeries_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchCreateTensorboardTimeSeriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1_BatchCreateTensorboardTimeSeriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1_BatchCreateTensorboardTimeSeriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse.class, com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse.Builder.class); } public static final int TENSORBOARD_TIME_SERIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.TensorboardTimeSeries> tensorboardTimeSeries_; /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.TensorboardTimeSeries> getTensorboardTimeSeriesList() { return tensorboardTimeSeries_; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder> getTensorboardTimeSeriesOrBuilderList() { return tensorboardTimeSeries_; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ @java.lang.Override public int getTensorboardTimeSeriesCount() { return tensorboardTimeSeries_.size(); } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.TensorboardTimeSeries getTensorboardTimeSeries(int index) { return tensorboardTimeSeries_.get(index); } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder getTensorboardTimeSeriesOrBuilder(int index) { return tensorboardTimeSeries_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tensorboardTimeSeries_.size(); i++) { output.writeMessage(1, tensorboardTimeSeries_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tensorboardTimeSeries_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, tensorboardTimeSeries_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse other = (com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) obj; if (!getTensorboardTimeSeriesList().equals(other.getTensorboardTimeSeriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTensorboardTimeSeriesCount() > 0) { hash = (37 * hash) + TENSORBOARD_TIME_SERIES_FIELD_NUMBER; hash = (53 * hash) + getTensorboardTimeSeriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1.TensorboardService.BatchCreateTensorboardTimeSeries]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1_BatchCreateTensorboardTimeSeriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1_BatchCreateTensorboardTimeSeriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse.class, com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse.Builder .class); } // Construct using // com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tensorboardTimeSeriesBuilder_ == null) { tensorboardTimeSeries_ = java.util.Collections.emptyList(); } else { tensorboardTimeSeries_ = null; tensorboardTimeSeriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.TensorboardServiceProto .internal_static_google_cloud_aiplatform_v1_BatchCreateTensorboardTimeSeriesResponse_descriptor; } @java.lang.Override public 
com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse build() { com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse buildPartial() { com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse result = new com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse result) { if (tensorboardTimeSeriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tensorboardTimeSeries_ = java.util.Collections.unmodifiableList(tensorboardTimeSeries_); bitField0_ = (bitField0_ & ~0x00000001); } result.tensorboardTimeSeries_ = tensorboardTimeSeries_; } else { result.tensorboardTimeSeries_ = tensorboardTimeSeriesBuilder_.build(); } } private void buildPartial0( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) 
{ return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) { return mergeFrom( (com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse other) { if (other == com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse .getDefaultInstance()) return this; if (tensorboardTimeSeriesBuilder_ == null) { if (!other.tensorboardTimeSeries_.isEmpty()) { if (tensorboardTimeSeries_.isEmpty()) { tensorboardTimeSeries_ = other.tensorboardTimeSeries_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.addAll(other.tensorboardTimeSeries_); } onChanged(); } } else { if (!other.tensorboardTimeSeries_.isEmpty()) { if (tensorboardTimeSeriesBuilder_.isEmpty()) { tensorboardTimeSeriesBuilder_.dispose(); tensorboardTimeSeriesBuilder_ = null; tensorboardTimeSeries_ = other.tensorboardTimeSeries_; bitField0_ = (bitField0_ & ~0x00000001); tensorboardTimeSeriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getTensorboardTimeSeriesFieldBuilder() : null; } else { tensorboardTimeSeriesBuilder_.addAllMessages(other.tensorboardTimeSeries_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.TensorboardTimeSeries m = input.readMessage( com.google.cloud.aiplatform.v1.TensorboardTimeSeries.parser(), extensionRegistry); if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.add(m); } else { tensorboardTimeSeriesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1.TensorboardTimeSeries> tensorboardTimeSeries_ = java.util.Collections.emptyList(); private void ensureTensorboardTimeSeriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tensorboardTimeSeries_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.TensorboardTimeSeries>( tensorboardTimeSeries_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TensorboardTimeSeries, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder, com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder> 
tensorboardTimeSeriesBuilder_; /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public java.util.List<com.google.cloud.aiplatform.v1.TensorboardTimeSeries> getTensorboardTimeSeriesList() { if (tensorboardTimeSeriesBuilder_ == null) { return java.util.Collections.unmodifiableList(tensorboardTimeSeries_); } else { return tensorboardTimeSeriesBuilder_.getMessageList(); } } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public int getTensorboardTimeSeriesCount() { if (tensorboardTimeSeriesBuilder_ == null) { return tensorboardTimeSeries_.size(); } else { return tensorboardTimeSeriesBuilder_.getCount(); } } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public com.google.cloud.aiplatform.v1.TensorboardTimeSeries getTensorboardTimeSeries( int index) { if (tensorboardTimeSeriesBuilder_ == null) { return tensorboardTimeSeries_.get(index); } else { return tensorboardTimeSeriesBuilder_.getMessage(index); } } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder setTensorboardTimeSeries( int index, com.google.cloud.aiplatform.v1.TensorboardTimeSeries value) { if (tensorboardTimeSeriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.set(index, value); onChanged(); } else { tensorboardTimeSeriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder setTensorboardTimeSeries( int index, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder builderForValue) { if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.set(index, builderForValue.build()); onChanged(); } else { tensorboardTimeSeriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder addTensorboardTimeSeries( com.google.cloud.aiplatform.v1.TensorboardTimeSeries value) { if (tensorboardTimeSeriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.add(value); onChanged(); } else { tensorboardTimeSeriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder addTensorboardTimeSeries( int index, com.google.cloud.aiplatform.v1.TensorboardTimeSeries value) { if (tensorboardTimeSeriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.add(index, value); onChanged(); } else { tensorboardTimeSeriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder addTensorboardTimeSeries( com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder builderForValue) { if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.add(builderForValue.build()); onChanged(); } else { tensorboardTimeSeriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder addTensorboardTimeSeries( int index, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder builderForValue) { if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.add(index, builderForValue.build()); onChanged(); } else { tensorboardTimeSeriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder addAllTensorboardTimeSeries( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.TensorboardTimeSeries> values) { if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tensorboardTimeSeries_); onChanged(); } else { tensorboardTimeSeriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder clearTensorboardTimeSeries() { if (tensorboardTimeSeriesBuilder_ == null) { tensorboardTimeSeries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tensorboardTimeSeriesBuilder_.clear(); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public Builder removeTensorboardTimeSeries(int index) { if (tensorboardTimeSeriesBuilder_ == null) { ensureTensorboardTimeSeriesIsMutable(); tensorboardTimeSeries_.remove(index); onChanged(); } else { tensorboardTimeSeriesBuilder_.remove(index); } return this; } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder getTensorboardTimeSeriesBuilder(int index) { return getTensorboardTimeSeriesFieldBuilder().getBuilder(index); } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder getTensorboardTimeSeriesOrBuilder(int index) { if (tensorboardTimeSeriesBuilder_ == null) { return tensorboardTimeSeries_.get(index); } else { return tensorboardTimeSeriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public java.util.List<? 
extends com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder> getTensorboardTimeSeriesOrBuilderList() { if (tensorboardTimeSeriesBuilder_ != null) { return tensorboardTimeSeriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tensorboardTimeSeries_); } } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder addTensorboardTimeSeriesBuilder() { return getTensorboardTimeSeriesFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.TensorboardTimeSeries.getDefaultInstance()); } /** * * * <pre> * The created TensorboardTimeSeries. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder addTensorboardTimeSeriesBuilder(int index) { return getTensorboardTimeSeriesFieldBuilder() .addBuilder( index, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.getDefaultInstance()); } /** * * * <pre> * The created TensorboardTimeSeries. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.TensorboardTimeSeries tensorboard_time_series = 1; * </code> */ public java.util.List<com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder> getTensorboardTimeSeriesBuilderList() { return getTensorboardTimeSeriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TensorboardTimeSeries, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder, com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder> getTensorboardTimeSeriesFieldBuilder() { if (tensorboardTimeSeriesBuilder_ == null) { tensorboardTimeSeriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.TensorboardTimeSeries, com.google.cloud.aiplatform.v1.TensorboardTimeSeries.Builder, com.google.cloud.aiplatform.v1.TensorboardTimeSeriesOrBuilder>( tensorboardTimeSeries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tensorboardTimeSeries_ = null; } return tensorboardTimeSeriesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse) private static final com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse(); } public static com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> PARSER = new com.google.protobuf.AbstractParser<BatchCreateTensorboardTimeSeriesResponse>() { @java.lang.Override public BatchCreateTensorboardTimeSeriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.BatchCreateTensorboardTimeSeriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-kie-drools
34,898
kie-dmn/kie-dmn-legacy-tests/src/test/java/org/kie/dmn/legacy/tests/core/v1_1/decisionservices/DMNDecisionServicesTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kie.dmn.legacy.tests.core.v1_1.decisionservices;

import java.math.BigDecimal;
import java.util.Map;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.kie.api.builder.Message.Level;
import org.kie.dmn.api.core.DMNContext;
import org.kie.dmn.api.core.DMNModel;
import org.kie.dmn.api.core.DMNResult;
import org.kie.dmn.api.core.DMNRuntime;
import org.kie.dmn.core.api.DMNFactory;
import org.kie.dmn.core.compiler.CoerceDecisionServiceSingletonOutputOption;
import org.kie.dmn.core.util.DMNRuntimeUtil;
import org.kie.dmn.legacy.tests.core.v1_1.BaseDMN1_1VariantTest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for DMN v1.1 Decision Service evaluation: invoking a Decision Service directly,
 * invoking it from literal expressions / BKMs, output encapsulation, type checking of
 * inputs, singleton vs. multiple output decisions, and imported (including transitively
 * imported) Decision Services.
 * <p>
 * Each test is parameterized over the runtime variants supplied by
 * {@link BaseDMN1_1VariantTest} via the {@code params} method source.
 */
public class DMNDecisionServicesTest extends BaseDMN1_1VariantTest {

    public static final Logger LOG = LoggerFactory.getLogger(DMNDecisionServicesTest.class);

    /**
     * Smoke test over 0004-decision-services.dmn: loads the model and checks the three
     * Decision Service invocation scenarios (input data, input decisions absent, input
     * decisions supplied).
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void basic(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("0004-decision-services.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_686f58d4-4ec3-4c65-8c06-0e4fd8983def", "Decision Services");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        checkDSwithInputData(runtime, dmnModel);
        checkDSwithInputDecision(runtime, dmnModel);
        checkDSwithInputDecision2(runtime, dmnModel);
    }

    /**
     * Invokes the DS whose requirements are plain input data (D, E) and verifies the
     * single output decision A concatenates them.
     */
    private void checkDSwithInputData(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "A only as output knowing D and E");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("A")).isEqualTo("de");
    }

    /**
     * Invokes a DS whose requirements are input DECISIONS (B, C); since they are not
     * supplied in the context, the output A is expected to be null.
     */
    private void checkDSwithInputDecision(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "A Only Knowing B and C");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("A")).isNull(); // because B and C are not defined in input.
    }

    /**
     * Same DS as {@link #checkDSwithInputDecision}, but this time the input decisions
     * B and C ARE provided in the context, so A is computed from them.
     */
    private void checkDSwithInputDecision2(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        context.set("B", "inB");
        context.set("C", "inC");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "A Only Knowing B and C");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("A")).isEqualTo("inBinC");
    }

    /**
     * A Decision Service invoked from within a decision's literal expression;
     * evaluateAll must resolve and execute the DS function.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSInLiteralExpression(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionServicesInLiteralExpression.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_686f58d4-4ec3-4c65-8c06-0e4fd8983def", "Decision Services");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Decide based on A and DS")).isEqualTo("xyde");
    }

    /**
     * As {@link #dSInLiteralExpression}, with a BKM also participating; the BKM appends
     * "mn" to the expected value.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSInLiteralExpressionWithBKM(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionServicesInLiteralExpressionWithBKM.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_686f58d4-4ec3-4c65-8c06-0e4fd8983def", "Decision Services");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Decide based on A and DS")).isEqualTo("xydemn");
    }

    /**
     * As {@link #dSInLiteralExpressionWithBKM}, but the BKM is called via an Invocation
     * expression instead of a literal FEEL call; same expected result.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSInLiteralExpressionWithBKMUsingInvocation(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionServicesInLiteralExpressionWithBKMUsingInvocation.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_686f58d4-4ec3-4c65-8c06-0e4fd8983def", "Decision Services");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Decide based on A and DS")).isEqualTo("xydemn");
    }

    /**
     * Variant where the decision's value comes only from the BKM/DS chain (no "xy"
     * prefix from other decisions).
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSInLiteralExpressionOnlyfromBKMUsingInvocation(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionServicesInLiteralExpressionOnlyFromBKMUsingInvocation.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_686f58d4-4ec3-4c65-8c06-0e4fd8983def", "Decision Services");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext context = DMNFactory.newContext();
        context.set("D", "d");
        context.set("E", "e");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Decide based on A and DS")).isEqualTo("demn");
    }

    /**
     * A model with three Decision Services of differing output sets; evaluateAll must
     * expose each DS result (as a singleton value or as a map of decisions depending on
     * the DS outputs), encapsulated decisions must NOT leak into the DS result, and each
     * DS is additionally invoked one-by-one via the helper methods.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void mixtypeDS(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("mixtype-DS.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_c9885563-aa54-4c7b-ae8a-738cfd29b544", "mixtype DS");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person year of birth", BigDecimal.valueOf(1980));
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Greet the Person")).isEqualTo("Hello, John");
        assertThat(result.get("Person age")).isEqualTo(BigDecimal.valueOf(38));
        assertThat(result.get("is Person an adult")).isEqualTo(Boolean.TRUE);
        // "DS all" exposes all three decisions as a map; "hardcoded now" stays internal.
        assertThat((Map<String, Object>) result.get("eval DS all")).containsEntry("Greet the Person", "Hello, ds all");
        assertThat((Map<String, Object>) result.get("eval DS all")).containsEntry("Person age", BigDecimal.valueOf(18));
        assertThat((Map<String, Object>) result.get("eval DS all")).containsEntry("is Person an adult", true);
        assertThat((Map<String, Object>) result.get("eval DS all")).doesNotContainKey("hardcoded now");
        // "DS encapsulate" hides "Person age" as an encapsulated decision.
        assertThat((Map<String, Object>) result.get("eval DS encapsulate")).containsEntry("Greet the Person", "Hello, DS encapsulate");
        assertThat((Map<String, Object>) result.get("eval DS encapsulate")).doesNotContainKey("Person age");
        assertThat((Map<String, Object>) result.get("eval DS encapsulate")).containsEntry("is Person an adult", true);
        assertThat((Map<String, Object>) result.get("eval DS encapsulate")).doesNotContainKey("hardcoded now");
        assertThat((Map<String, Object>) result.get("eval DS greet adult")).containsEntry("Greet the Person", "Hello, DS greet adult");
        assertThat((Map<String, Object>) result.get("eval DS greet adult")).doesNotContainKey("Person age");
        assertThat((Map<String, Object>) result.get("eval DS greet adult")).containsEntry("is Person an adult", true);
        assertThat((Map<String, Object>) result.get("eval DS greet adult")).doesNotContainKey("hardcoded now");

        // additionally check DS one-by-one
        testMixtypeDS_checkDSall(runtime, dmnModel);
        testMixtypeDS_checkDSencapsulate(runtime, dmnModel);
        testMixtypeDS_checkDSgreetadult(runtime, dmnModel);
    }

    /**
     * Directly invokes "DS all" (exposes every decision); birth year 2008 gives age 10,
     * so "is Person an adult" is false.
     */
    private void testMixtypeDS_checkDSall(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person year of birth", BigDecimal.valueOf(2008));
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "DS all");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).containsEntry("Greet the Person", "Hello, John");
        assertThat(result.getAll()).containsEntry("Person age", BigDecimal.valueOf(10));
        assertThat(result.getAll()).containsEntry("is Person an adult", false);
        assertThat(result.getAll()).doesNotContainKey("hardcoded now");
    }

    /**
     * Directly invokes "DS encapsulate": "Person age" is an encapsulated decision and
     * must not appear in the result context.
     */
    private void testMixtypeDS_checkDSencapsulate(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person year of birth", BigDecimal.valueOf(2008));
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "DS encapsulate");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).containsEntry("Greet the Person", "Hello, John");
        assertThat(result.getAll()).doesNotContainKey("Person age");
        assertThat(result.getAll()).containsEntry("is Person an adult", false);
        assertThat(result.getAll()).doesNotContainKey("hardcoded now");
    }

    /**
     * Directly invokes "DS greet adult", supplying "Person age" as an INPUT this time;
     * no decision result is produced for "Person age" itself.
     */
    private void testMixtypeDS_checkDSgreetadult(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person age", BigDecimal.valueOf(10));
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "DS greet adult");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).containsEntry("Greet the Person", "Hello, John");
        assertThat(dmnResult.getDecisionResultByName("Person age")).isNull();
        assertThat(result.getAll()).containsEntry("is Person an adult", false);
        assertThat(result.getAll()).doesNotContainKey("hardcoded now");
    }

    /**
     * Type checking of Decision Service inputs: correct and swapped (wrong-typed) inputs
     * through both evaluateAll and evaluateDecisionService.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSForTypeCheck(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionService20180718.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_6eef3a7c-bb0d-40bb-858d-f9067789c18a", "Decision Service 20180718");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        testDSForTypeCheck_runNormal(runtime, dmnModel);
        testDSForTypeCheck_runAllDecisionsWithWrongTypes(runtime, dmnModel);
        testDSForTypeCheck_runDecisionService_Normal(runtime, dmnModel);
        testDSForTypeCheck_runDecisionService_WithWrongTypes(runtime, dmnModel);
    }

    /** evaluateAll with well-typed inputs: all three decisions evaluate successfully. */
    private void testDSForTypeCheck_runNormal(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person age", BigDecimal.valueOf(21));
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("Greet the person")).isEqualTo("Hello, John");
        assertThat(result.get("is Person at age allowed")).isEqualTo(Boolean.TRUE);
        assertThat(result.get("Final Decision")).isEqualTo("Hello, John; you are allowed");
    }

    /** evaluateAll with name/age values swapped: type checking must surface errors. */
    private void testDSForTypeCheck_runAllDecisionsWithWrongTypes(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", BigDecimal.valueOf(21));
        context.set("Person age", "John");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isTrue();
    }

    /**
     * Decision Service invocation with well-typed inputs: encapsulated decisions are
     * hidden and only the output decision is exposed.
     */
    private void testDSForTypeCheck_runDecisionService_Normal(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", "John");
        context.set("Person age", BigDecimal.valueOf(21));
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "DS given inputdata");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).doesNotContainKey("Greet the person"); // Decision Service will encapsulate this decision
        assertThat(result.getAll()).doesNotContainKey("is Person at age allowed"); // Decision Service will encapsulate this decision
        assertThat(result.get("Final Decision")).isEqualTo("Hello, John; you are allowed");
    }

    /**
     * Decision Service invocation with wrong-typed inputs: a WARNING message originating
     * from the DS input node (_cf49add9-...) is expected.
     */
    private void testDSForTypeCheck_runDecisionService_WithWrongTypes(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("Person name", BigDecimal.valueOf(21));
        context.set("Person age", "John");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "DS given inputdata");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.getMessages().stream().anyMatch(m -> m.getSourceId().equals("_cf49add9-84a4-40ac-8306-1eea599ff43c") && m.getLevel() == Level.WARNING))
                .as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isTrue();
    }

    /**
     * Default coercion behavior: a DS with a single output decision yields its value
     * directly (singleton coercion), while a DS with multiple output decisions yields a
     * map keyed by decision name.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSSingletonOrMultipleOutputDecisions(VariantTestConf conf) {
        testConfig = conf;
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("Decision-Services-singleton-or-multiple-output-decisions.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_b4ebfbf2-8608-4297-9662-be70bab01974", "Decision Services singleton or multiple output decisions");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        final DMNContext emptyContext = DMNFactory.newContext();
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, emptyContext);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("a Value")).isEqualTo("a string Value");
        assertThat(result.get("a String Value")).isEqualTo("a String Value");
        assertThat(result.get("a Number Value")).isEqualTo(BigDecimal.valueOf(47));
        // singleton-output DS coerced to the bare value; multi-output DS stays a map.
        assertThat(result.get("eval DS with singleton value")).isEqualTo("a string Value");
        assertThat((Map<String, Object>) result.get("eval DS with multiple output decisions")).containsEntry("a String Value", "a String Value");
        assertThat((Map<String, Object>) result.get("eval DS with multiple output decisions")).containsEntry("a Number Value", BigDecimal.valueOf(47));

        final DMNResult dmnResultDSSingleton = runtime.evaluateDecisionService(dmnModel, emptyContext, "DS with singleton value");
        LOG.debug("{}", dmnResultDSSingleton);
        dmnResultDSSingleton.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResultDSSingleton.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResultDSSingleton.getMessages())).isFalse();
        assertThat(dmnResultDSSingleton.getContext().get("a Value")).isEqualTo("a string Value");
        assertThat(dmnResultDSSingleton.getContext().getAll()).doesNotContainKey("a String Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.
        assertThat(dmnResultDSSingleton.getContext().getAll()).doesNotContainKey("a Number Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.

        final DMNResult dmnResultMultiple = runtime.evaluateDecisionService(dmnModel, emptyContext, "DS with multiple output decisions");
        LOG.debug("{}", dmnResultMultiple);
        dmnResultMultiple.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResultMultiple.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResultMultiple.getMessages())).isFalse();
        assertThat(dmnResultMultiple.getContext().get("a String Value")).isEqualTo("a String Value");
        assertThat(dmnResultMultiple.getContext().get("a Number Value")).isEqualTo(BigDecimal.valueOf(47));
        assertThat(dmnResultMultiple.getContext().getAll()).doesNotContainKey("a Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.
    }

    /**
     * Same model as {@link #dSSingletonOrMultipleOutputDecisions} with the singleton
     * coercion DISABLED via the {@link CoerceDecisionServiceSingletonOutputOption} system
     * property: the singleton DS now also yields a map. The property is always cleared
     * in the finally block so other tests are unaffected.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void dSSingletonOrMultipleOutputDecisionsOVERRIDE(VariantTestConf conf) {
        testConfig = conf;
        try {
            System.setProperty(CoerceDecisionServiceSingletonOutputOption.PROPERTY_NAME, "false");
            final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("Decision-Services-singleton-or-multiple-output-decisions.dmn", this.getClass());
            final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_b4ebfbf2-8608-4297-9662-be70bab01974", "Decision Services singleton or multiple output decisions");
            assertThat(dmnModel).isNotNull();
            assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

            final DMNContext emptyContext = DMNFactory.newContext();
            final DMNResult dmnResult = runtime.evaluateAll(dmnModel, emptyContext);
            LOG.debug("{}", dmnResult);
            dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
            assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

            final DMNContext result = dmnResult.getContext();
            assertThat(result.get("a Value")).isEqualTo("a string Value");
            assertThat(result.get("a String Value")).isEqualTo("a String Value");
            assertThat(result.get("a Number Value")).isEqualTo(BigDecimal.valueOf(47));
            assertThat((Map<String, Object>) result.get("eval DS with singleton value")).containsEntry("a Value", "a string Value"); // DIFFERENCE with base test
            assertThat((Map<String, Object>) result.get("eval DS with multiple output decisions")).containsEntry("a String Value", "a String Value");
            assertThat((Map<String, Object>) result.get("eval DS with multiple output decisions")).containsEntry("a Number Value", BigDecimal.valueOf(47));

            final DMNResult dmnResultDSSingleton = runtime.evaluateDecisionService(dmnModel, emptyContext, "DS with singleton value");
            LOG.debug("{}", dmnResultDSSingleton);
            dmnResultDSSingleton.getDecisionResults().forEach(x -> LOG.debug("{}", x));
            assertThat(dmnResultDSSingleton.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResultDSSingleton.getMessages())).isFalse();
            assertThat(dmnResultDSSingleton.getContext().get("a Value")).isEqualTo("a string Value");
            assertThat(dmnResultDSSingleton.getContext().getAll()).doesNotContainKey("a String Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.
            assertThat(dmnResultDSSingleton.getContext().getAll()).doesNotContainKey("a Number Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.

            final DMNResult dmnResultMultiple = runtime.evaluateDecisionService(dmnModel, emptyContext, "DS with multiple output decisions");
            LOG.debug("{}", dmnResultMultiple);
            dmnResultMultiple.getDecisionResults().forEach(x -> LOG.debug("{}", x));
            assertThat(dmnResultMultiple.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResultMultiple.getMessages())).isFalse();
            assertThat(dmnResultMultiple.getContext().get("a String Value")).isEqualTo("a String Value");
            assertThat(dmnResultMultiple.getContext().get("a Number Value")).isEqualTo(BigDecimal.valueOf(47));
            assertThat(dmnResultMultiple.getContext().getAll()).doesNotContainKey("a Value"); // Decision Service will not expose (nor encapsulate hence not execute) this decision.
        } catch (final Exception e) {
            LOG.error("{}", e.getLocalizedMessage(), e);
            throw e;
        } finally {
            System.clearProperty(CoerceDecisionServiceSingletonOutputOption.PROPERTY_NAME);
            assertThat(System.getProperty(CoerceDecisionServiceSingletonOutputOption.PROPERTY_NAME)).isNull();
        }
    }

    /**
     * A Decision Service imported by another model and invoked from a decision there.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void importDS(VariantTestConf conf) {
        testConfig = conf;
        // DROOLS-2768 DMN Decision Service encapsulate Decision which imports a Decision Service
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntimeWithAdditionalResources("DecisionService20180718.dmn", this.getClass(), "ImportDecisionService20180718.dmn");
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_0ff3708a-c861-4a96-b85c-7b882f18b7a1", "Import Decision Service 20180718");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        testImportDS_testEvaluateAll(runtime, dmnModel);
        testImportDS_testEvaluateDS(runtime, dmnModel);
    }

    /** evaluateAll of the importing model: all three decisions are computed. */
    private void testImportDS_testEvaluateAll(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("L1 person name", "L1 Import John");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("invoke imported DS")).isEqualTo("Hello, L1 Import John; you are allowed");
        assertThat(result.get("Prefixing")).isEqualTo("Hello, L1 Import John");
        assertThat(result.get("final Import L1 decision")).isEqualTo("Hello, L1 Import John the result of invoking the imported DS is: Hello, L1 Import John; you are allowed");
    }

    /**
     * evaluateDecisionService of "Import L1 DS": its encapsulated decisions (which
     * themselves invoke the imported DS) stay hidden; only the final decision is exposed.
     */
    private void testImportDS_testEvaluateDS(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("L1 person name", "L1 Import Evaluate DS NAME");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "Import L1 DS");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).doesNotContainKey("invoke imported DS"); // Decision Service will encapsulate this decision
        assertThat(result.getAll()).doesNotContainKey("Prefixing"); // Decision Service will encapsulate this decision
        assertThat(result.get("final Import L1 decision")).isEqualTo("Hello, L1 Import Evaluate DS NAME the result of invoking the imported DS is: Hello, L1 Import Evaluate DS NAME; you are allowed");
    }

    /**
     * Two levels of import (L2 imports L1, which imports the base model): the transitive
     * Decision Service chain must resolve and evaluate.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void transitiveImportDS(VariantTestConf conf) {
        testConfig = conf;
        // DROOLS-2768 DMN Decision Service encapsulate Decision which imports a Decision Service
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntimeWithAdditionalResources("DecisionService20180718.dmn", this.getClass(), "ImportDecisionService20180718.dmn", "ImportofImportDecisionService20180718.dmn");
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_6698dc07-cc43-47ec-8187-8faa7d8c35ba", "Import of Import Decision Service 20180718");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        testTransitiveImportDS_testEvaluateAll(runtime, dmnModel);
        testTransitiveImportDS_testEvaluateDS(runtime, dmnModel);
    }

    /** evaluateAll of the L2 model: both L2 decisions are computed. */
    private void testTransitiveImportDS_testEvaluateAll(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("L2 Person name", "L2 Bob");
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("L2 Invoking the L1 import")).isEqualTo("Hello, L2 Bob the result of invoking the imported DS is: Hello, L2 Bob; you are allowed");
        assertThat(result.get("Final L2 Decision")).isEqualTo("The result of invoking the L1 DS was: Hello, L2 Bob the result of invoking the imported DS is: Hello, L2 Bob; you are allowed");
    }

    /** evaluateDecisionService of "L2 DS": the intermediate L2 decision is encapsulated. */
    private void testTransitiveImportDS_testEvaluateDS(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        context.set("L2 Person name", "L2 Bob DS");
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "L2 DS");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.getAll()).doesNotContainKey("L2 Invoking the L1 import"); // Decision Service will encapsulate this decision
        assertThat(result.get("Final L2 Decision")).isEqualTo("The result of invoking the L1 DS was: Hello, L2 Bob DS the result of invoking the imported DS is: Hello, L2 Bob DS; you are allowed");
    }

    /**
     * Regression test for the DecisionServiceCompiler wiring (DROOLS-2943); the static
     * helpers are public so other test classes exercising other DMN formats can reuse
     * the same assertions.
     */
    @ParameterizedTest(name = "{0}")
    @MethodSource("params")
    void decisionServiceCompiler20180830(VariantTestConf conf) {
        testConfig = conf;
        // DROOLS-2943 DMN DecisionServiceCompiler not correctly wired for DMNv1.2 format
        final DMNRuntime runtime = DMNRuntimeUtil.createRuntime("DecisionServiceABC.dmn", this.getClass());
        final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/dmn/definitions/_2443d3f5-f178-47c6-a0c9-b1fd1c933f60", "Drawing 1");
        assertThat(dmnModel).isNotNull();
        assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse();

        testDecisionServiceCompiler20180830_testEvaluateDS(runtime, dmnModel);
        testDecisionServiceCompiler20180830_testEvaluateAll(runtime, dmnModel);
    }

    /** Shared assertion helper: evaluateAll computes both "ABC" and "Invoking Decision". */
    public static void testDecisionServiceCompiler20180830_testEvaluateAll(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context);
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        assertThat(result.get("ABC")).isEqualTo("abc");
        assertThat(result.get("Invoking Decision")).isEqualTo("abc");
    }

    /** Shared assertion helper: evaluating only the DS computes "ABC" and nothing else. */
    public static void testDecisionServiceCompiler20180830_testEvaluateDS(final DMNRuntime runtime, final DMNModel dmnModel) {
        final DMNContext context = DMNFactory.newContext();
        final DMNResult dmnResult = runtime.evaluateDecisionService(dmnModel, context, "Decision Service ABC");
        LOG.debug("{}", dmnResult);
        dmnResult.getDecisionResults().forEach(x -> LOG.debug("{}", x));
        assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse();

        final DMNContext result = dmnResult.getContext();
        // NOTE: Decision Service "Decision Service ABC" does NOT encapsulate any decision.
        assertThat(result.getAll()).doesNotContainKey("Invoking Decision"); // we invoked only the Decision Service, not this other Decision in the model.
        assertThat(result.get("ABC")).isEqualTo("abc");
    }
}
apache/hadoop
35,187
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ha; import java.io.IOException; import java.util.Collections; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.SecurityUtil; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.Code; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.Watcher.Event; import org.apache.zookeeper.client.ZKClientConfig; import org.apache.zookeeper.common.ClientX509Util; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.ZooDefs.Ids; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback; import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException; import org.apache.hadoop.util.ZKUtil.ZKAuthInfo; import 
org.apache.hadoop.test.GenericTestUtils;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.any;

/**
 * Unit tests for {@link ActiveStandbyElector}, driven entirely against a
 * Mockito-mocked {@link ZooKeeper} client. The tests feed the elector's
 * asynchronous callbacks ({@code processResult}, {@code processWatchEvent})
 * directly and then verify, via Mockito interaction counts, which ZooKeeper
 * operations and application callbacks resulted. No real ZooKeeper server or
 * network connection is involved.
 */
public class TestActiveStandbyElector {

  // Shared mock ZooKeeper handle returned by every connectToZooKeeper().
  private ZooKeeper mockZK;
  // Number of times connectToZooKeeper() has been invoked (i.e. number of
  // ZooKeeper "sessions" created), incremented by the tester subclass below.
  private int count;
  // Mocked application callback whose invocations the tests verify.
  private ActiveStandbyElectorCallback mockApp;
  // Dummy application data written into the lock znode.
  private final byte[] data = new byte[8];
  private ActiveStandbyElectorTester elector;

  /**
   * Test subclass of {@link ActiveStandbyElector} that substitutes the mock
   * ZooKeeper for a real connection and records (instead of performing)
   * retry sleeps.
   */
  class ActiveStandbyElectorTester extends ActiveStandbyElector {
    // Total milliseconds the elector *would* have slept in retry paths.
    private int sleptFor = 0;

    ActiveStandbyElectorTester(String hostPort, int timeout, String parent,
        List<ACL> acl, ActiveStandbyElectorCallback app) throws IOException,
        KeeperException {
      super(hostPort, timeout, parent, acl,
          Collections.<ZKAuthInfo> emptyList(), app,
          CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT, null);
    }

    @Override
    public ZooKeeper connectToZooKeeper() {
      // Count session creations so tests can assert on reconnects.
      ++count;
      return mockZK;
    }

    @Override
    protected void sleepFor(int ms) {
      // don't sleep in unit tests! Instead, just record the amount of
      // time slept
      LOG.info("Would have slept for " + ms + "ms");
      sleptFor += ms;
    }
  }

  private static final String ZK_PARENT_NAME = "/parent/node";
  private static final String ZK_LOCK_NAME = ZK_PARENT_NAME + "/" +
      ActiveStandbyElector.LOCK_FILENAME;
  private static final String ZK_BREADCRUMB_NAME = ZK_PARENT_NAME + "/" +
      ActiveStandbyElector.BREADCRUMB_FILENAME;

  /**
   * Create fresh mocks and a fresh elector before each test so interaction
   * counts never leak between tests.
   */
  @BeforeEach
  public void init() throws IOException, KeeperException {
    count = 0;
    mockZK = Mockito.mock(ZooKeeper.class);
    mockApp = Mockito.mock(ActiveStandbyElectorCallback.class);
    elector = new ActiveStandbyElectorTester("hostPort", 1000,
        ZK_PARENT_NAME, Ids.OPEN_ACL_UNSAFE, mockApp);
  }

  /**
   * Set up the mock ZK to return no info for a prior active in ZK.
   */
  private void mockNoPriorActive() throws Exception {
    Mockito.doThrow(new KeeperException.NoNodeException()).when(mockZK)
        .getData(Mockito.eq(ZK_BREADCRUMB_NAME), Mockito.anyBoolean(),
            Mockito.<Stat>any());
  }

  /**
   * Set up the mock to return info for some prior active node in ZK.
   */
  private void mockPriorActive(byte[] data) throws Exception {
    Mockito.doReturn(data).when(mockZK)
        .getData(Mockito.eq(ZK_BREADCRUMB_NAME), Mockito.anyBoolean(),
            Mockito.<Stat>any());
  }

  /**
   * verify that joinElection checks for null data
   */
  @Test
  public void testJoinElectionException() {
    assertThrows(HadoopIllegalArgumentException.class,
        () -> elector.joinElection(null));
  }

  /**
   * verify that joinElection tries to create ephemeral lock znode
   */
  @Test
  public void testJoinElection() {
    elector.joinElection(data);
    Mockito.verify(mockZK, Mockito.times(1)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
  }

  /**
   * verify that successful znode create result becomes active and monitoring
   * is started
   */
  @Test
  public void testCreateNodeResultBecomeActive() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
    verifyExistCall(1);

    // monitor callback verifies the leader is ephemeral owner of lock but
    // does not call becomeActive since its already active
    Stat stat = new Stat();
    stat.setEphemeralOwner(1L);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    // should not call neutral mode/standby/active
    Mockito.verify(mockApp, Mockito.times(0)).enterNeutralMode();
    Mockito.verify(mockApp, Mockito.times(0)).becomeStandby();
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
    // another joinElection not called.
    Mockito.verify(mockZK, Mockito.times(1)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    // no new monitor called
    verifyExistCall(1);
  }

  /**
   * Verify that, when the callback fails to enter active state,
   * the elector rejoins the election after sleeping for a short period.
   */
  @Test
  public void testFailToBecomeActive() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);
    assertEquals(0, elector.sleptFor);

    Mockito.doThrow(new ServiceFailedException("failed to become active"))
        .when(mockApp).becomeActive();
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    // Should have tried to become active
    Mockito.verify(mockApp).becomeActive();

    // should re-join
    Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    assertEquals(2, count);
    assertTrue(elector.sleptFor > 0);
  }

  /**
   * Verify that, when the callback fails to enter active state, after
   * a ZK disconnect (i.e from the StatCallback), that the elector rejoins
   * the election after sleeping for a short period.
   */
  @Test
  public void testFailToBecomeActiveAfterZKDisconnect() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);
    assertEquals(0, elector.sleptFor);

    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);

    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    verifyExistCall(1);

    Stat stat = new Stat();
    stat.setEphemeralOwner(1L);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);

    // Fake failure to become active from within the stat callback
    Mockito.doThrow(new ServiceFailedException("fail to become active"))
        .when(mockApp).becomeActive();
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();

    // should re-join
    Mockito.verify(mockZK, Mockito.times(3)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    assertEquals(2, count);
    assertTrue(elector.sleptFor > 0);
  }

  /**
   * Verify that, if there is a record of a prior active node, the
   * elector asks the application to fence it before becoming active.
   */
  @Test
  public void testFencesOldActive() throws Exception {
    byte[] fakeOldActiveData = new byte[0];
    mockPriorActive(fakeOldActiveData);

    elector.joinElection(data);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    // Application fences active.
    Mockito.verify(mockApp, Mockito.times(1)).fenceOldActive(
        fakeOldActiveData);
    // Updates breadcrumb node to new data
    Mockito.verify(mockZK, Mockito.times(1)).setData(
        Mockito.eq(ZK_BREADCRUMB_NAME), Mockito.eq(data), Mockito.eq(0));
    // Then it becomes active itself
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
  }

  /**
   * Verify that a voluntary quitElection() cleans up the breadcrumb znode
   * that was written when this elector became active.
   */
  @Test
  public void testQuitElectionRemovesBreadcrumbNode() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    // Writes its own active info
    Mockito.verify(mockZK, Mockito.times(1)).create(
        Mockito.eq(ZK_BREADCRUMB_NAME), Mockito.eq(data),
        Mockito.eq(Ids.OPEN_ACL_UNSAFE), Mockito.eq(CreateMode.PERSISTENT));
    mockPriorActive(data);
    elector.quitElection(false);

    // Deletes its own active data
    Mockito.verify(mockZK, Mockito.times(1)).delete(
        Mockito.eq(ZK_BREADCRUMB_NAME), Mockito.eq(0));
  }

  /**
   * verify that znode create for existing node and no retry becomes standby
   * and monitoring is started
   */
  @Test
  public void testCreateNodeResultBecomeStandby() {
    elector.joinElection(data);

    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    verifyExistCall(1);
  }

  /**
   * verify that znode create error result in fatal error
   */
  @Test
  public void testCreateNodeResultError() {
    elector.joinElection(data);

    elector.processResult(Code.APIERROR.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Received create error from Zookeeper. code:APIERROR " +
        "for path " + ZK_LOCK_NAME);
  }

  /**
   * verify that retry of network errors verifies master by session id and
   * becomes active if they match. monitoring is started.
   */
  @Test
  public void testCreateNodeResultRetryBecomeActive() throws Exception {
    mockNoPriorActive();

    elector.joinElection(data);

    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);

    // 4 errors results in fatalError
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Received create error from Zookeeper. code:CONNECTIONLOSS " +
        "for path " + ZK_LOCK_NAME + ". " +
        "Not retrying further znode create connection errors.");

    elector.joinElection(data);
    // recreate connection via getNewZooKeeper
    assertEquals(2, count);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    verifyExistCall(1);

    Stat stat = new Stat();
    stat.setEphemeralOwner(1L);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
    verifyExistCall(1);

    Mockito.verify(mockZK, Mockito.times(6)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
  }

  /**
   * verify that retry of network errors verifies active by session id and
   * becomes standby if they dont match. monitoring is started.
   */
  @Test
  public void testCreateNodeResultRetryBecomeStandby() {
    elector.joinElection(data);

    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    verifyExistCall(1);

    // ephemeral owner (0) != our session id (1) => someone else holds the lock
    Stat stat = new Stat();
    stat.setEphemeralOwner(0);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    verifyExistCall(1);
  }

  /**
   * verify that if create znode results in nodeexists and that znode is
   * deleted before exists() watch is set then the return of the exists()
   * method results in attempt to re-create the znode and become active
   */
  @Test
  public void testCreateNodeResultRetryNoNode() {
    elector.joinElection(data);

    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, ZK_LOCK_NAME);
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    verifyExistCall(1);

    elector.processResult(Code.NONODE.intValue(), ZK_LOCK_NAME, mockZK,
        (Stat) null);
    Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode();
    Mockito.verify(mockZK, Mockito.times(4)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
  }

  /**
   * verify that more than 3 network error retries result fatalError
   */
  @Test
  public void testStatNodeRetry() {
    elector.joinElection(data);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, (Stat) null);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, (Stat) null);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, (Stat) null);
    elector.processResult(Code.CONNECTIONLOSS.intValue(), ZK_LOCK_NAME,
        mockZK, (Stat) null);
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Received stat error from Zookeeper. code:CONNECTIONLOSS. "+
        "Not retrying further znode monitoring connection errors.");
  }

  /**
   * verify error in exists() callback results in fatal error
   */
  @Test
  public void testStatNodeError() {
    elector.joinElection(data);
    elector.processResult(Code.RUNTIMEINCONSISTENCY.intValue(), ZK_LOCK_NAME,
        mockZK, (Stat) null);
    Mockito.verify(mockApp, Mockito.times(0)).enterNeutralMode();
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Received stat error from Zookeeper. code:RUNTIMEINCONSISTENCY");
  }

  /**
   * verify behavior of watcher.process callback with non-node event
   */
  @Test
  public void testProcessCallbackEventNone() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);

    WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class);
    Mockito.when(mockEvent.getType()).thenReturn(Event.EventType.None);

    // first SyncConnected should not do anything
    Mockito.when(mockEvent.getState()).thenReturn(
        Event.KeeperState.SyncConnected);
    elector.processWatchEvent(mockZK, mockEvent);
    Mockito.verify(mockZK, Mockito.times(0)).exists(Mockito.anyString(),
        Mockito.anyBoolean(), any(), any());

    // disconnection should enter safe mode
    Mockito.when(mockEvent.getState()).thenReturn(
        Event.KeeperState.Disconnected);
    elector.processWatchEvent(mockZK, mockEvent);
    Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode();

    // re-connection should monitor master status
    Mockito.when(mockEvent.getState()).thenReturn(
        Event.KeeperState.SyncConnected);
    elector.processWatchEvent(mockZK, mockEvent);
    verifyExistCall(1);
    assertTrue(elector.isMonitorLockNodePending());

    elector.processResult(Code.SESSIONEXPIRED.intValue(), ZK_LOCK_NAME,
        mockZK, new Stat());
    assertFalse(elector.isMonitorLockNodePending());

    // session expired should enter safe mode and initiate re-election
    // re-election checked via checking re-creation of new zookeeper and
    // call to create lock znode
    Mockito.when(mockEvent.getState()).thenReturn(Event.KeeperState.Expired);
    elector.processWatchEvent(mockZK, mockEvent);
    // already in safe mode above. should not enter safe mode again
    Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode();
    // called getNewZooKeeper to create new session. first call was in
    // constructor
    assertEquals(2, count);
    // once in initial joinElection and one now
    Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);

    // create znode success. become master and monitor
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
    verifyExistCall(2);

    // error event results in fatal error
    Mockito.when(mockEvent.getState()).thenReturn(
        Event.KeeperState.AuthFailed);
    elector.processWatchEvent(mockZK, mockEvent);
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Unexpected Zookeeper watch event state: AuthFailed");
    // only 1 state change callback is called at a time
    Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode();
  }

  /**
   * verify behavior of watcher.process with node event
   */
  @Test
  public void testProcessCallbackEventNode() throws Exception {
    mockNoPriorActive();
    elector.joinElection(data);

    // make the object go into the monitoring state
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    verifyExistCall(1);
    assertTrue(elector.isMonitorLockNodePending());

    Stat stat = new Stat();
    stat.setEphemeralOwner(0L);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class);
    Mockito.when(mockEvent.getPath()).thenReturn(ZK_LOCK_NAME);

    // monitoring should be setup again after event is received
    Mockito.when(mockEvent.getType()).thenReturn(
        Event.EventType.NodeDataChanged);
    elector.processWatchEvent(mockZK, mockEvent);
    verifyExistCall(2);
    assertTrue(elector.isMonitorLockNodePending());
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    // monitoring should be setup again after event is received
    Mockito.when(mockEvent.getType()).thenReturn(
        Event.EventType.NodeChildrenChanged);
    elector.processWatchEvent(mockZK, mockEvent);
    verifyExistCall(3);
    assertTrue(elector.isMonitorLockNodePending());
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    // lock node deletion when in standby mode should create znode again
    // successful znode creation enters active state and sets monitor
    Mockito.when(mockEvent.getType()).thenReturn(
        Event.EventType.NodeDeleted);
    elector.processWatchEvent(mockZK, mockEvent);
    // enterNeutralMode not called when app is standby and leader is lost
    Mockito.verify(mockApp, Mockito.times(0)).enterNeutralMode();
    // once in initial joinElection() and one now
    Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeActive();
    verifyExistCall(4);
    assertTrue(elector.isMonitorLockNodePending());
    stat.setEphemeralOwner(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    // lock node deletion in active mode should enter neutral mode and create
    // znode again successful znode creation enters active state and sets
    // monitor
    Mockito.when(mockEvent.getType()).thenReturn(
        Event.EventType.NodeDeleted);
    elector.processWatchEvent(mockZK, mockEvent);
    Mockito.verify(mockApp, Mockito.times(1)).enterNeutralMode();
    // another joinElection called
    Mockito.verify(mockZK, Mockito.times(3)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(2)).becomeActive();
    verifyExistCall(5);
    assertTrue(elector.isMonitorLockNodePending());
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    // bad path name results in fatal error
    Mockito.when(mockEvent.getPath()).thenReturn(null);
    elector.processWatchEvent(mockZK, mockEvent);
    Mockito.verify(mockApp, Mockito.times(1)).notifyFatalError(
        "Unexpected watch error from Zookeeper");
    // fatal error means no new connection other than one from constructor
    assertEquals(1, count);
    // no new watches after fatal error
    verifyExistCall(5);
  }

  /**
   * Assert that the elector has issued exactly {@code times} exists() calls
   * (lock-node monitor registrations) on the mock ZooKeeper.
   */
  private void verifyExistCall(int times) {
    Mockito.verify(mockZK, Mockito.times(times)).exists(
        Mockito.eq(ZK_LOCK_NAME), Mockito.<Watcher>any(),
        Mockito.same(elector), Mockito.same(mockZK));
  }

  /**
   * verify becomeStandby is not called if already in standby
   */
  @Test
  public void testSuccessiveStandbyCalls() {
    elector.joinElection(data);

    // make the object go into the monitoring standby state
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    verifyExistCall(1);
    assertTrue(elector.isMonitorLockNodePending());

    Stat stat = new Stat();
    stat.setEphemeralOwner(0L);
    Mockito.when(mockZK.getSessionId()).thenReturn(1L);
    elector.processResult(Code.OK.intValue(), ZK_LOCK_NAME, mockZK, stat);
    assertFalse(elector.isMonitorLockNodePending());

    WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class);
    Mockito.when(mockEvent.getPath()).thenReturn(ZK_LOCK_NAME);

    // notify node deletion
    // monitoring should be setup again after event is received
    Mockito.when(mockEvent.getType()).thenReturn(
        Event.EventType.NodeDeleted);
    elector.processWatchEvent(mockZK, mockEvent);
    // is standby. no need to notify anything now
    Mockito.verify(mockApp, Mockito.times(0)).enterNeutralMode();
    // another joinElection called.
    Mockito.verify(mockZK, Mockito.times(2)).create(ZK_LOCK_NAME, data,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
    // lost election
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    // still standby. so no need to notify again
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    // monitor is set again
    verifyExistCall(2);
  }

  /**
   * verify quit election terminates connection and there are no new watches.
   * next call to joinElection creates new connection and performs election
   */
  @Test
  public void testQuitElection() throws Exception {
    elector.joinElection(data);
    Mockito.verify(mockZK, Mockito.times(0)).close();
    elector.quitElection(true);
    Mockito.verify(mockZK, Mockito.times(1)).close();
    // no watches added
    verifyExistCall(0);

    // NOTE: local declaration intentionally shadows the 'data' field
    byte[] data = new byte[8];
    elector.joinElection(data);
    // getNewZooKeeper called 2 times. once in constructor and once now
    assertEquals(2, count);
    elector.processResult(Code.NODEEXISTS.intValue(), ZK_LOCK_NAME, mockZK,
        ZK_LOCK_NAME);
    Mockito.verify(mockApp, Mockito.times(1)).becomeStandby();
    verifyExistCall(1);
  }

  /**
   * verify that receiveActiveData gives data when active exists, tells that
   * active does not exist and reports error in getting active information
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws KeeperException
   * @throws ActiveNotFoundException
   */
  @Test
  public void testGetActiveData() throws ActiveNotFoundException,
      KeeperException, InterruptedException, IOException {
    // get valid active data
    byte[] data = new byte[8];
    Mockito.when(
        mockZK.getData(Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false),
            any())).thenReturn(data);
    // the mock returns the very same array instance, so reference equality
    // (which assertEquals uses for arrays) is sufficient here
    assertEquals(data, elector.getActiveData());
    Mockito.verify(mockZK, Mockito.times(1)).getData(
        Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any());

    // active does not exist
    Mockito.when(
        mockZK.getData(Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false),
            any())).thenThrow(
        new KeeperException.NoNodeException());
    try {
      elector.getActiveData();
      fail("ActiveNotFoundException expected");
    } catch(ActiveNotFoundException e) {
      Mockito.verify(mockZK, Mockito.times(2)).getData(
          Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any());
    }

    // error getting active data rethrows keeperexception
    try {
      Mockito.when(
          mockZK.getData(Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false),
              any())).thenThrow(
          new KeeperException.AuthFailedException());
      elector.getActiveData();
      fail("KeeperException.AuthFailedException expected");
    } catch(KeeperException.AuthFailedException ke) {
      Mockito.verify(mockZK, Mockito.times(3)).getData(
          Mockito.eq(ZK_LOCK_NAME), Mockito.eq(false), any());
    }
  }

  /**
   * Test that ensureBaseNode() recursively creates the specified dir
   */
  @Test
  public void testEnsureBaseNode() throws Exception {
    elector.ensureParentZNode();
    StringBuilder prefix = new StringBuilder();
    for (String part : ZK_PARENT_NAME.split("/")) {
      if (part.isEmpty()) continue;
      prefix.append("/").append(part);
      if (!"/".equals(prefix.toString())) {
        Mockito.verify(mockZK).create(
            Mockito.eq(prefix.toString()), Mockito.<byte[]>any(),
            Mockito.eq(Ids.OPEN_ACL_UNSAFE),
            Mockito.eq(CreateMode.PERSISTENT));
      }
    }
  }

  /**
   * Test that ACLs are set on parent zNode even if the node already exists.
   */
  @Test
  public void testParentZNodeACLs() throws Exception {
    // anonymous subclass is needed only because KeeperException's
    // constructor is protected
    KeeperException ke = new KeeperException(Code.NODEEXISTS) {
      @Override
      public Code code() {
        return super.code();
      }
    };

    Mockito.when(mockZK.create(Mockito.anyString(), Mockito.eq(new byte[]{}),
        Mockito.anyList(),
        Mockito.eq(CreateMode.PERSISTENT))).thenThrow(ke);

    elector.ensureParentZNode();

    StringBuilder prefix = new StringBuilder();
    for (String part : ZK_PARENT_NAME.split("/")) {
      if (part.isEmpty()) continue;
      prefix.append("/").append(part);
      if (!"/".equals(prefix.toString())) {
        Mockito.verify(mockZK).getACL(Mockito.eq(prefix.toString()),
            Mockito.eq(new Stat()));
        Mockito.verify(mockZK).setACL(Mockito.eq(prefix.toString()),
            Mockito.eq(Ids.OPEN_ACL_UNSAFE), Mockito.anyInt());
      }
    }
  }

  /**
   * Test for a bug encountered during development of HADOOP-8163:
   * ensureBaseNode() should throw an exception if it has to retry
   * more than 3 times to create any part of the path.
   */
  @Test
  public void testEnsureBaseNodeFails() throws Exception {
    Mockito.doThrow(new KeeperException.ConnectionLossException())
        .when(mockZK).create(
            Mockito.eq(ZK_PARENT_NAME), Mockito.<byte[]>any(),
            Mockito.eq(Ids.OPEN_ACL_UNSAFE),
            Mockito.eq(CreateMode.PERSISTENT));
    try {
      elector.ensureParentZNode();
      fail("Did not throw!");
    } catch (IOException ioe) {
      // only a ConnectionLoss-caused IOException is the expected outcome
      if (!(ioe.getCause() instanceof
          KeeperException.ConnectionLossException)) {
        throw ioe;
      }
    }
    // Should have tried three times
    Mockito.verify(mockZK, Mockito.times(3)).create(
        Mockito.eq(ZK_PARENT_NAME), Mockito.<byte[]>any(),
        Mockito.eq(Ids.OPEN_ACL_UNSAFE), Mockito.eq(CreateMode.PERSISTENT));
  }

  /**
   * verify the zookeeper connection establishment
   */
  @Test
  public void testWithoutZKServer() throws Exception {
    try {
      new ActiveStandbyElector("127.0.0.1", 2000, ZK_PARENT_NAME,
          Ids.OPEN_ACL_UNSAFE, Collections.<ZKAuthInfo> emptyList(), mockApp,
          CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT,
          null) {
        @Override
        protected ZooKeeper createZooKeeper() throws IOException {
          return Mockito.mock(ZooKeeper.class);
        }
      };
      fail("Did not throw zookeeper connection loss exceptions!");
    } catch (KeeperException ke) {
      GenericTestUtils.assertExceptionContains( "ConnectionLoss", ke);
    }
  }

  /**
   * joinElection(..) should happen only after SERVICE_HEALTHY.
   */
  @Test
  public void testBecomeActiveBeforeServiceHealthy() throws Exception {
    mockNoPriorActive();
    WatchedEvent mockEvent = Mockito.mock(WatchedEvent.class);
    Mockito.when(mockEvent.getType()).thenReturn(Event.EventType.None);
    // session expired should enter safe mode
    // But for first time, before the SERVICE_HEALTY i.e. appData is set,
    // should not enter the election.
    Mockito.when(mockEvent.getState()).thenReturn(Event.KeeperState.Expired);
    elector.processWatchEvent(mockZK, mockEvent);
    // joinElection should not be called.
    Mockito.verify(mockZK, Mockito.times(0)).create(ZK_LOCK_NAME, null,
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL, elector, mockZK);
  }

  /**
   * We want to test if we create an ActiveStandbyElector with null as a
   * TruststoreKeystore, then we are creating a ZooKeeper without the SSL
   * configs in ActiveStandbyElector and the other configs are the same as
   * the default values. We do this by checking the ZKClientConfig
   * properties.
   * @throws Exception
   */
  @Test
  public void testWithoutTruststoreKeystore() throws Exception {
    ZKClientConfig defaultConfig = new ZKClientConfig();
    ClientX509Util clientX509Util = new ClientX509Util();
    System.out.println(defaultConfig.getProperty(
        ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET));
    // spy so the initiateZookeeper(...) argument can be captured;
    // connectToZooKeeper is overridden to avoid any real connection
    ActiveStandbyElector e = Mockito.spy(new ActiveStandbyElector(
        "localhost", 1, "", Collections.emptyList(), null,
        Mockito.mock(ActiveStandbyElectorCallback.class), 1, null) {
      @Override
      protected synchronized ZooKeeper connectToZooKeeper() {
        return null;
      }
    });

    e.createZooKeeper();

    ArgumentCaptor<ZKClientConfig> configArgumentCaptor =
        ArgumentCaptor.forClass(ZKClientConfig.class);
    Mockito.verify(e).initiateZookeeper(configArgumentCaptor.capture());
    ZKClientConfig clientConfig = configArgumentCaptor.getValue();
    assertEquals(defaultConfig.getProperty(ZKClientConfig.SECURE_CLIENT),
        clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT));
    assertEquals(defaultConfig.getProperty(
        ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET),
        clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET));
    assertNull(clientConfig.getProperty(
        clientX509Util.getSslKeystoreLocationProperty()));
    assertNull(clientConfig.getProperty(
        clientX509Util.getSslKeystorePasswdProperty()));
    assertNull(clientConfig.getProperty(
        clientX509Util.getSslTruststoreLocationProperty()));
    assertNull(clientConfig.getProperty(
        clientX509Util.getSslTruststorePasswdProperty()));
  }

  /**
   * We want to test if we create an ActiveStandbyElector with a
   * TruststoreKeystore, which already has the SSL configuration set, then
   * we are creating a ZooKeeper with the correct SSL configs in
   * ActiveStandbyElector. We do this by checking the ZKClientConfig
   * properties.
   * @throws Exception
   */
  @Test
  public void testWithTruststoreKeystore() throws Exception {
    Configuration conf = new Configuration();
    ClientX509Util clientX509Util = new ClientX509Util();
    conf.set(CommonConfigurationKeys.ZK_SSL_KEYSTORE_LOCATION,
        "keystore_location");
    conf.set(CommonConfigurationKeys.ZK_SSL_KEYSTORE_PASSWORD,
        "keystore_password");
    conf.set(CommonConfigurationKeys.ZK_SSL_TRUSTSTORE_LOCATION,
        "truststore_location");
    conf.set(CommonConfigurationKeys.ZK_SSL_TRUSTSTORE_PASSWORD,
        "truststore_password");
    SecurityUtil.TruststoreKeystore truststoreKeystore =
        new SecurityUtil.TruststoreKeystore(conf);
    // spy so the initiateZookeeper(...) argument can be captured;
    // connectToZooKeeper is overridden to avoid any real connection
    ActiveStandbyElector e = Mockito.spy(new ActiveStandbyElector(
        "localhost", 1, "", Collections.emptyList(), null,
        Mockito.mock(ActiveStandbyElectorCallback.class), 1,
        truststoreKeystore) {
      @Override
      protected synchronized ZooKeeper connectToZooKeeper() {
        return null;
      }
    });

    e.createZooKeeper();

    ArgumentCaptor<ZKClientConfig> configArgumentCaptor =
        ArgumentCaptor.forClass(ZKClientConfig.class);
    Mockito.verify(e).initiateZookeeper(configArgumentCaptor.capture());
    ZKClientConfig clientConfig = configArgumentCaptor.getValue();
    assertEquals("true",
        clientConfig.getProperty(ZKClientConfig.SECURE_CLIENT));
    assertEquals("org.apache.zookeeper.ClientCnxnSocketNetty",
        clientConfig.getProperty(ZKClientConfig.ZOOKEEPER_CLIENT_CNXN_SOCKET));
    assertEquals("keystore_location",
        clientConfig.getProperty(
            clientX509Util.getSslKeystoreLocationProperty()));
    assertEquals("keystore_password",
        clientConfig.getProperty(
            clientX509Util.getSslKeystorePasswdProperty()));
    assertEquals("truststore_location",
        clientConfig.getProperty(
            clientX509Util.getSslTruststoreLocationProperty()));
    assertEquals("truststore_password",
        clientConfig.getProperty(
            clientX509Util.getSslTruststorePasswdProperty()));
  }
}
oracle/coherence
35,014
prj/coherence-core-components/src/main/java/com/tangosol/coherence/component/net/packet/MessagePacket.java
/* * Copyright (c) 2000, 2023, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ // ---- class: com.tangosol.coherence.component.net.packet.MessagePacket package com.tangosol.coherence.component.net.packet; import com.tangosol.coherence.component.net.MemberSet; import com.tangosol.coherence.component.net.memberSet.actualMemberSet.serviceMemberSet.MasterMemberSet; import com.tangosol.io.ReadBuffer; import com.tangosol.util.Base; import java.io.IOException; import java.sql.Time; /** * A Packet is a unit of data transmission between the Members of the network. * There are five different type of Packets: Broadcast, Directed, Sequel, * Request and Ack. * * A Message Packet represents a whole or a part of a Message. A Message could * be sent to a number of recipient Members. There are Broadcast and "point" * Message Packets depending on the intended recipients of the Message. * * If the Message is being sent to all Members or potential Members, it uses a * Broadcast Packet, which is unaddressed (aka no "to" Member id information, * and it may not even have a return address, aka a "from" Member id, if the * sender has not been assigned a Member id by the cluster). A Message the is * formatted into a Broadcast Packet must fit entirely in one Packet. * * There are also "point" Message Packets, which come from this Member and go * to one or more addressed Members (recipients). For each recipient, there is * a sequential counter which is unique in the scope of the sender/recipient * pair that a Message is marked with and for each sender there is a global * sequential counter that an outgoing Message is marked with. These counters * are used to quickly identify incoming point Message Packets, acknowledge * them, and determine if any prerequisite point Packets are missing. * * There are two types of "point" Message Packets: Directed and Sequel. 
Each * "point" Message is formatted into one Directed Packet and zero or more * Sequel Packets. The Directed Packet carries the Message-describing * information and the first chunk of Message data, and the Sequel Packets * carry any additional chunks of Message data that did not fit into the * Directed Packet. * * A recipient of "point" Message Packets is responsible for acknowledging the * receipt of those Packets. The Ack Packet is sent back to the sender to * acknowledge one or more "point" Message Packets. * * If a recipient determines that it missed a "point" Message Packet, it can * send a Request Packet to tell the sender of the "point" Message Packet that * the "point" Message Packet was never received and is being waited upon by * the recipient. * * The Ack and Request Packets are referred to as Notify Packets because they * are used by one Member to Notify another Member of Packet communication * success and failure. */ @SuppressWarnings({"deprecation", "rawtypes", "unused", "unchecked", "ConstantConditions", "DuplicatedCode", "ForLoopReplaceableByForEach", "IfCanBeSwitch", "RedundantArrayCreation", "RedundantSuppression", "SameParameterValue", "TryFinallyCanBeTryWithResources", "TryWithIdenticalCatches", "UnnecessaryBoxing", "UnnecessaryUnboxing", "UnusedAssignment"}) public abstract class MessagePacket extends com.tangosol.coherence.component.net.Packet implements com.tangosol.net.internal.PacketIdentifier, Cloneable { // ---- Fields declarations ---- /** * Property BodyLength * * The length in bytes of this Packet's body. */ private int __m_BodyLength; /** * Property ByteBuffer * * Assigned only on incomig MessagePackets. The ByteBuffer retains one MTU * size worth of a Messages body. The ByteBuffer is released back to the * BufferManager which it was acquired from when the complete Message has * been received. 
*/ private transient java.nio.ByteBuffer __m_ByteBuffer; /** * Property DELIVERY_CONFIRMED * * Indicates that all recipients of the packet have acknowledged it. */ public static final int DELIVERY_CONFIRMED = 4; /** * Property DELIVERY_DEFERRED * * Indicates that a packet will be delayed in being sent. */ public static final int DELIVERY_DEFERRED = 2; /** * Property DELIVERY_LOST * * Indicates that the packet was already sent but was not confirmed before * reaching its scheduled resend time. */ public static final int DELIVERY_LOST = 3; /** * Property DELIVERY_OUTSTANDING * * Indicates that a packet has been sent but not yet confirmed or lost. */ public static final int DELIVERY_OUTSTANDING = 1; /** * Property DELIVERY_UNSENT * * The initial delivery state of a packet upon creation. */ public static final int DELIVERY_UNSENT = 0; /** * Property DeliveryState * * The delivery state of the packet, only applicable to outgoing packets. * Only maintained when flow control is enabled. */ private transient int __m_DeliveryState; /** * Property FromMessageId * * A Directed or Sequel Packet represents a whole or a part of a Message. * Each sender maintains a global sequential number that every outgoing * Message is marked with (except for Broadcast Messages). This property * represents the sender specific Message id for this packet or zero if the * Message id is not applicable. Prior to 3.2.2 the receiver only held the * trint representation for this value. */ private long __m_FromMessageId; /** * Property MessagePartCount * * Specifies the number of Packet components that compose the Message to * which this Packet belongs. * Broadcast: 1 (Broadcast does not support Sequel Packets) * Directed: 1 or greater (the first will be a Directed Packet, all others * will be Sequel Packets) * Sequel: Always more than one (otherwise no need for a Sequel Packet) * * Note that incoming Sequel cannot determine this property until it is * part of a Message (i.e. 
until Message property is set) */ private int __m_MessagePartCount; /** * Property MessagePartIndex * * Specifies an zero-based index of this Packet within the multi-Packet * Message. The value is only applicable (i.e. non-zero) for Sequel * Packets. */ private int __m_MessagePartIndex; /** * Property MessageType * * Specifies the type of the Message that will be constructed from this * Packet. Only Directed (and thus Sequel) and Broadcast Packets form * Message objects. */ private int __m_MessageType; /** * Property NackInProgress * * Indicates that the packet has been Nackd and requires immediate resend. * Once the packet comes off the head of the resend queue this property is * cleared. * * May only be accessed while synchronized on the resend queue. */ private transient boolean __m_NackInProgress; /** * Property PendingResendSkips * * The number of times the packet needs to be skipped from processing by * the resend queue. * * This property is reserved for use by the Publisher thread. */ private transient int __m_PendingResendSkips; /** * Property ReadBuffer * * Only assigned on outgoig MessagePackets by defineBufferView. The * ReadBuffer is a view into the region this MessagePacket corresponds to. */ private com.tangosol.io.ReadBuffer __m_ReadBuffer; /** * Property ResendScheduled * * This property is reserved for use by the PacketPublisher. The * ResendScheduled value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will be resent if a confirmation for the * Packet has not been received. */ private long __m_ResendScheduled; /** * Property ResendTimeout * * This property is reserved for use by the PacketPublisher. The * ResendTimeout value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will stop being resent even if a * confirmation for the Packet has not been received, and the Members that * have not acknowledged the Packet will be assumed to be dead. 
*/ private long __m_ResendTimeout; /** * Property ServiceId * * Specifies the Service to which the assembled Message will go (or from * which the Message that was disassembled into this Packet came). */ private int __m_ServiceId; /** * Property ToMemberSet * * Used for outgoing Packets only. Set of Members still to deliver to. Use * of ToMemberSet and ToId properties are either/or (exclusive). * * Presence of a non-null value is used to identify that the packet started * as a Multipoint packet (see isOutgoingMultipoint). Therefore this * property should never be nulled out, but cleared instead. */ private com.tangosol.coherence.component.net.memberSet.DependentMemberSet __m_ToMemberSet; /** * Property TYPE_NO_DESTINATION * * There is no remaining members which to send this packet. */ public static final int TYPE_NO_DESTINATION = 0; // Initializing constructor public MessagePacket(String sName, com.tangosol.coherence.Component compParent, boolean fInit) { super(sName, compParent, false); } // Private initializer protected void __initPrivate() { super.__initPrivate(); } //++ getter for static property _CLASS /** * Getter for property _CLASS.<p> * Property with auto-generated accessor that returns the Class object for a * given component. */ public static Class get_CLASS() { Class clz; try { clz = Class.forName("com.tangosol.coherence/component/net/packet/MessagePacket".replace('/', '.')); } catch (ClassNotFoundException e) { throw new NoClassDefFoundError(e.getMessage()); } return clz; } //++ getter for autogen property _Module /** * This is an auto-generated method that returns the global [design time] * parent component. * * Note: the class generator will ignore any custom implementation for this * behavior. */ private com.tangosol.coherence.Component get_Module() { return this; } /** * Similar to _assert(), but IOException is thrown. 
*/ public static void assertIO(boolean fCondition) throws java.io.IOException { // import java.io.IOException; if (!fCondition) { throw new IOException(); } } /** * @see Directed#selectType * @see Sequel#selectType */ public static int calcBodyLength(int cbHeader, int cbPref, int cbMax) { return Math.min(Math.max(cbHeader << 2, cbPref), // desired size Math.max(cbPref, cbMax)) // the largest packet we can exchange - cbHeader; } /** * Compute the maximum number of members (member ids) that can be encoded in * a packet of a given size. This is a pessimistic calculation and * intentionally includes recycled members within the limit. * * @see calcBodyLength * @see Directed#selectType * @see Sequel#selectType */ public static int calcMaxMembers(int cb) { // import Component.Net.MemberSet.ActualMemberSet.ServiceMemberSet.MasterMemberSet; // the limit is based on sending a DIRECTED_MANY packet // starts with a fix cost // followed a MANY encoded memberset // encoded in 4 byte increments each representing up to 32 members // followed by N trints (3 bytes each) (one for each member in the memberset) cb -= 21; // fixed cost of DIRECTED_MANY packet // computing the number of members which can be encoded is done in two stages: // stage 1: compute the number of members which can be encoded using fully // populated 4B words, and the corresponding trints. These are blocks of 100B. int cMembers = (cb / 100) * 32; // stage 2: add in the number of members which can be used with the remainder cb = (cb % 100); if (cb >= 7) { // there is room for another word and some trints cMembers += (cb - 4) / 3; } return Math.min(MasterMemberSet.MAX_MEMBERS, cMembers); } /** * Remove all recipients for this packet. */ public void clearRecipients() { // import com.tangosol.util.Base; if (isOutgoingMultipoint()) { getToMemberSet().clear(); } else { setToId(0); } setSentMillis(Base.getSafeTimeMillis()); } // Declared at the super level /** * Is used to report undeliverable Packets. 
*/ public Object clone() { // import com.tangosol.util.Base; // import com.tangosol.io.ReadBuffer; // since clone is used exclusively for outgoing MessagePackets, // ByteBuffer should be null as it is only used by incoming MessagePackets _assert(getByteBuffer() == null); MessagePacket packet; try { packet = (MessagePacket) super.clone(); } catch (CloneNotSupportedException e) { throw Base.ensureRuntimeException(e); } ReadBuffer readBuffer = getReadBuffer(); if (readBuffer != null) { packet.setReadBuffer((ReadBuffer) readBuffer.clone()); } return packet; } /** * Defines the buffer and the region where the body of this packet resides. * * @param buffer the ReadBuffer which defines this packet * @param of the offset within the ReadBuffer where this packet begins * @param cb the length of this packet */ public void defineBufferView(com.tangosol.io.ReadBuffer buffer, int of, int cb) { setBodyLength(cb); setReadBuffer(buffer.getReadBuffer(of, cb)); } // Declared at the super level public boolean equals(Object obj) { // two Packets are considered equal if they have the same from // Member id, from Message id, and MessagePartIndex if (obj instanceof MessagePacket) { MessagePacket that = (MessagePacket) obj; return this.getFromId() == that.getFromId() && this.getFromMessageId() == that.getFromMessageId() && this.getMessagePartIndex() == that.getMessagePartIndex(); } return false; } public String formatDeliveryState(int nDeliveryState) { switch (nDeliveryState) { case DELIVERY_UNSENT: return "unsent"; case DELIVERY_OUTSTANDING: return "outstanding"; case DELIVERY_DEFERRED : return "deferred"; case DELIVERY_LOST : return "lost"; case DELIVERY_CONFIRMED : return "confirmed"; default: return "<unknown>"; } } // Accessor for the property "BodyLength" /** * Getter for property BodyLength.<p> * The length in bytes of this Packet's body. 
*/ public int getBodyLength() { return __m_BodyLength; } // Accessor for the property "ByteBuffer" /** * Getter for property ByteBuffer.<p> * Assigned only on incomig MessagePackets. The ByteBuffer retains one MTU * size worth of a Messages body. The ByteBuffer is released back to the * BufferManager which it was acquired from when the complete Message has * been received. */ public java.nio.ByteBuffer getByteBuffer() { return __m_ByteBuffer; } // Accessor for the property "DeliveryState" /** * Getter for property DeliveryState.<p> * The delivery state of the packet, only applicable to outgoing packets. * Only maintained when flow control is enabled. */ public int getDeliveryState() { return __m_DeliveryState; } // From interface: com.tangosol.net.internal.PacketIdentifier // Accessor for the property "FromMessageId" /** * Getter for property FromMessageId.<p> * A Directed or Sequel Packet represents a whole or a part of a Message. * Each sender maintains a global sequential number that every outgoing * Message is marked with (except for Broadcast Messages). This property * represents the sender specific Message id for this packet or zero if the * Message id is not applicable. Prior to 3.2.2 the receiver only held the * trint representation for this value. */ public long getFromMessageId() { return __m_FromMessageId; } // Accessor for the property "HeaderLength" /** * Getter for property HeaderLength.<p> * The length of the packet's header in bytes. */ public int getHeaderLength() { return 0; } // Declared at the super level /** * Getter for property Length.<p> * The maximum size of the packet in serialized form. Used to determine * bundling of individual packets into the same PacketBundle. * * The number of actual bytes may be less than this as member are departing * the cluster. 
*/ public int getLength() { return getHeaderLength() + getBodyLength(); } // Accessor for the property "MessagePartCount" /** * Getter for property MessagePartCount.<p> * Specifies the number of Packet components that compose the Message to * which this Packet belongs. * Broadcast: 1 (Broadcast does not support Sequel Packets) * Directed: 1 or greater (the first will be a Directed Packet, all others * will be Sequel Packets) * Sequel: Always more than one (otherwise no need for a Sequel Packet) * * Note that incoming Sequel cannot determine this property until it is part * of a Message (i.e. until Message property is set) */ public int getMessagePartCount() { return __m_MessagePartCount; } // From interface: com.tangosol.net.internal.PacketIdentifier // Accessor for the property "MessagePartIndex" /** * Getter for property MessagePartIndex.<p> * Specifies an zero-based index of this Packet within the multi-Packet * Message. The value is only applicable (i.e. non-zero) for Sequel Packets. */ public int getMessagePartIndex() { return __m_MessagePartIndex; } // Accessor for the property "MessageType" /** * Getter for property MessageType.<p> * Specifies the type of the Message that will be constructed from this * Packet. Only Directed (and thus Sequel) and Broadcast Packets form * Message objects. */ public int getMessageType() { return __m_MessageType; } // Accessor for the property "PendingResendSkips" /** * Getter for property PendingResendSkips.<p> * The number of times the packet needs to be skipped from processing by the * resend queue. * * This property is reserved for use by the Publisher thread. */ public int getPendingResendSkips() { return __m_PendingResendSkips; } // Accessor for the property "ReadBuffer" /** * Get the ReadBuffer for this Packet. The ReadBuffer will represent the * region within the WriteBuffer from which the content of this Packet can * be read. 
*/ public com.tangosol.io.ReadBuffer getReadBuffer() { return __m_ReadBuffer; } // Accessor for the property "ResendScheduled" /** * Getter for property ResendScheduled.<p> * This property is reserved for use by the PacketPublisher. The * ResendScheduled value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will be resent if a confirmation for the * Packet has not been received. */ public long getResendScheduled() { return __m_ResendScheduled; } // Accessor for the property "ResendTimeout" /** * Getter for property ResendTimeout.<p> * This property is reserved for use by the PacketPublisher. The * ResendTimeout value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will stop being resent even if a confirmation * for the Packet has not been received, and the Members that have not * acknowledged the Packet will be assumed to be dead. */ public long getResendTimeout() { return __m_ResendTimeout; } // Accessor for the property "ServiceId" /** * Getter for property ServiceId.<p> * Specifies the Service to which the assembled Message will go (or from * which the Message that was disassembled into this Packet came). */ public int getServiceId() { return __m_ServiceId; } // Accessor for the property "ToMemberSet" /** * Getter for property ToMemberSet.<p> * Used for outgoing Packets only. Set of Members still to deliver to. Use * of ToMemberSet and ToId properties are either/or (exclusive). * * Presence of a non-null value is used to identify that the packet started * as a Multipoint packet (see isOutgoingMultipoint). Therefore this * property should never be nulled out, but cleared instead. 
*/ public com.tangosol.coherence.component.net.memberSet.DependentMemberSet getToMemberSet() { return __m_ToMemberSet; } // Declared at the super level public int hashCode() { return getFromId() ^ ((int) getFromMessageId()) ^ getMessagePartIndex(); } // Declared at the super level /** * Check if the packet is still addressed to the specified member Id. Once * the packet has been ack'd by a member this will return false. */ public boolean isAddressedTo(int nMemberId) { // import Component.Net.MemberSet; if (super.isAddressedTo(nMemberId)) { return true; } MemberSet memberSet = getToMemberSet(); return memberSet != null && memberSet.contains(nMemberId); } // Accessor for the property "NackInProgress" /** * Getter for property NackInProgress.<p> * Indicates that the packet has been Nackd and requires immediate resend. * Once the packet comes off the head of the resend queue this property is * cleared. * * May only be accessed while synchronized on the resend queue. */ public boolean isNackInProgress() { return __m_NackInProgress; } // Declared at the super level /** * Getter for property OutgoingMultipoint.<p> * True if the Packet may have multiple Members to which it is addressed. * (Note: False for Broadcast, which is not addressed.) * * This property is only true for Message Packets that have a ToMemberSet. * * Should be used only if Outgoing is set. */ public boolean isOutgoingMultipoint() { return getToMemberSet() != null; } // Accessor for the property "ResendNecessary" /** * Getter for property ResendNecessary.<p> * ResendNecessary evaluates to true until the MessagePacket has been * acknowledged by all of the recipients (or those recipients have left the * cluster). 
*/ public boolean isResendNecessary() { // import Component.Net.MemberSet; if (getToId() != 0) { return true; } MemberSet set = getToMemberSet(); return set != null && !set.isEmpty(); } public boolean registerAck(com.tangosol.coherence.component.net.Member memberFrom) { // import Component.Net.MemberSet; // import java.sql.Time; if (!isOutgoing()) { // debugging to help track down cause of COH-13095 long lSentMillis = getSentMillis(); long lRecvMillis = getReceivedMillis(); _trace("Received ACK from " + memberFrom + " for incomming packet, with sent count of " + getSentCount() + ", last send timestamp of " + lSentMillis + "(" + new Time(lSentMillis) + "), recv timestamp of " + lRecvMillis + "(" + new Time(lRecvMillis) + "): " + this, 1); _assert(isOutgoing()); // original assert } MemberSet set = getToMemberSet(); if (set == null) { int nToId = getToId(); int nFromId = memberFrom.getId(); if (nToId != 0) { if (nFromId != nToId) { // debugging to help track down cause of COH-13095 long lSentMillis = getSentMillis(); long lRecvMillis = getReceivedMillis(); _trace("Received ACK from " + memberFrom + " " + nFromId + " for packet sent to " + nToId + " , with sent count of " + getSentCount() + ", last send timestamp of " + lSentMillis + "(" + new Time(lSentMillis) + "), recv timestamp of " + lRecvMillis + "(" + new Time(lRecvMillis) + "): " + this, 1); _assert(nFromId == nToId); // original assert } setToId(0); return true; } return false; } else { return set.remove(memberFrom); } } // Accessor for the property "BodyLength" /** * Setter for property BodyLength.<p> * The length in bytes of this Packet's body. */ protected void setBodyLength(int cbBody) { __m_BodyLength = cbBody; } // Accessor for the property "ByteBuffer" /** * Setter for property ByteBuffer.<p> * Assigned only on incomig MessagePackets. The ByteBuffer retains one MTU * size worth of a Messages body. 
The ByteBuffer is released back to the * BufferManager which it was acquired from when the complete Message has * been received. */ public void setByteBuffer(java.nio.ByteBuffer buffer) { __m_ByteBuffer = buffer; } // Accessor for the property "DeliveryState" /** * Setter for property DeliveryState.<p> * The delivery state of the packet, only applicable to outgoing packets. * Only maintained when flow control is enabled. */ protected void setDeliveryState(int nState) { __m_DeliveryState = nState; } // Accessor for the property "DeliveryState" /** * Helper method for setting the delivery state of a packet, for the * specified member, only called on the Publisher thread. */ public void setDeliveryState(int nNewState, com.tangosol.coherence.component.net.Member member) { // import Component.Net.Member$FlowControl as com.tangosol.coherence.component.net.Member.FlowControl; int nCurrentState = getDeliveryState(); if (nNewState == nCurrentState) { return; } com.tangosol.coherence.component.net.Member.FlowControl flowControl = member.getFlowControl(); if (flowControl != null) { switch (nCurrentState) { case DELIVERY_OUTSTANDING: flowControl.setOutstandingPacketCount(flowControl.getOutstandingPacketCount() - 1); break; case DELIVERY_DEFERRED: flowControl.setDeferredPacketCount(flowControl.getDeferredPacketCount() - 1); break; } switch (nNewState) { case DELIVERY_OUTSTANDING: flowControl.setOutstandingPacketCount(flowControl.getOutstandingPacketCount() + 1); break; case DELIVERY_DEFERRED: flowControl.setDeferredPacketCount(flowControl.getDeferredPacketCount() + 1); break; } } setDeliveryState(nNewState); } // Accessor for the property "FromMessageId" /** * Setter for property FromMessageId.<p> * A Directed or Sequel Packet represents a whole or a part of a Message. * Each sender maintains a global sequential number that every outgoing * Message is marked with (except for Broadcast Messages). 
This property * represents the sender specific Message id for this packet or zero if the * Message id is not applicable. Prior to 3.2.2 the receiver only held the * trint representation for this value. */ public void setFromMessageId(long nId) { __m_FromMessageId = nId; } // Accessor for the property "MessagePartCount" /** * Setter for property MessagePartCount.<p> * Specifies the number of Packet components that compose the Message to * which this Packet belongs. * Broadcast: 1 (Broadcast does not support Sequel Packets) * Directed: 1 or greater (the first will be a Directed Packet, all others * will be Sequel Packets) * Sequel: Always more than one (otherwise no need for a Sequel Packet) * * Note that incoming Sequel cannot determine this property until it is part * of a Message (i.e. until Message property is set) */ public void setMessagePartCount(int cParts) { __m_MessagePartCount = cParts; } // Accessor for the property "MessagePartIndex" /** * Setter for property MessagePartIndex.<p> * Specifies an zero-based index of this Packet within the multi-Packet * Message. The value is only applicable (i.e. non-zero) for Sequel Packets. */ public void setMessagePartIndex(int i) { __m_MessagePartIndex = i; } // Accessor for the property "MessageType" /** * Setter for property MessageType.<p> * Specifies the type of the Message that will be constructed from this * Packet. Only Directed (and thus Sequel) and Broadcast Packets form * Message objects. */ public void setMessageType(int nType) { __m_MessageType = nType; } // Accessor for the property "NackInProgress" /** * Setter for property NackInProgress.<p> * Indicates that the packet has been Nackd and requires immediate resend. * Once the packet comes off the head of the resend queue this property is * cleared. * * May only be accessed while synchronized on the resend queue. 
*/ public void setNackInProgress(boolean fNack) { __m_NackInProgress = fNack; } // Accessor for the property "PendingResendSkips" /** * Setter for property PendingResendSkips.<p> * The number of times the packet needs to be skipped from processing by the * resend queue. * * This property is reserved for use by the Publisher thread. */ public void setPendingResendSkips(int pPendingResendSkips) { __m_PendingResendSkips = pPendingResendSkips; } // Accessor for the property "ReadBuffer" /** * Setter for property ReadBuffer.<p> * Only assigned on outgoig MessagePackets by defineBufferView. The * ReadBuffer is a view into the region this MessagePacket corresponds to. */ protected void setReadBuffer(com.tangosol.io.ReadBuffer buffer) { __m_ReadBuffer = buffer; } // Accessor for the property "ResendScheduled" /** * Setter for property ResendScheduled.<p> * This property is reserved for use by the PacketPublisher. The * ResendScheduled value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will be resent if a confirmation for the * Packet has not been received. */ public void setResendScheduled(long cMillis) { __m_ResendScheduled = cMillis; } // Accessor for the property "ResendTimeout" /** * Setter for property ResendTimeout.<p> * This property is reserved for use by the PacketPublisher. The * ResendTimeout value is the date/time (in millis) at which the Packet * (ConfirmationRequired=true) will stop being resent even if a confirmation * for the Packet has not been received, and the Members that have not * acknowledged the Packet will be assumed to be dead. */ public void setResendTimeout(long pResendTimeout) { __m_ResendTimeout = pResendTimeout; } // Accessor for the property "ServiceId" /** * Setter for property ServiceId.<p> * Specifies the Service to which the assembled Message will go (or from * which the Message that was disassembled into this Packet came). 
*/ public void setServiceId(int nId) { __m_ServiceId = nId; } // Accessor for the property "ToMemberSet" /** * Setter for property ToMemberSet.<p> * Used for outgoing Packets only. Set of Members still to deliver to. Use * of ToMemberSet and ToId properties are either/or (exclusive). * * Presence of a non-null value is used to identify that the packet started * as a Multipoint packet (see isOutgoingMultipoint). Therefore this * property should never be nulled out, but cleared instead. */ public void setToMemberSet(com.tangosol.coherence.component.net.memberSet.DependentMemberSet setMember) { __m_ToMemberSet = setMember; } }
apache/hadoop-common
35,086
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.balancer; import static org.apache.hadoop.hdfs.protocolPB.PBHelper.vintPrefixed; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.StorageType; import org.apache.hadoop.hdfs.protocol.Block; import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.hdfs.protocol.datatransfer.Sender; import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.server.balancer.Dispatcher.DDatanode.StorageGroup; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.HostsFileReader; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import com.google.common.base.Preconditions; /** Dispatching block replica moves between datanodes. */ @InterfaceAudience.Private public class Dispatcher { static final Log LOG = LogFactory.getLog(Dispatcher.class); private static final long GB = 1L << 30; // 1GB private static final long MAX_BLOCKS_SIZE_TO_FETCH = 2 * GB; private static final int MAX_NO_PENDING_MOVE_ITERATIONS = 5; private static final long DELAY_AFTER_ERROR = 10 * 1000L; // 10 seconds private final NameNodeConnector nnc; private final SaslDataTransferClient saslClient; /** Set of datanodes to be excluded. 
*/ private final Set<String> excludedNodes; /** Restrict to the following nodes. */ private final Set<String> includedNodes; private final Collection<Source> sources = new HashSet<Source>(); private final Collection<StorageGroup> targets = new HashSet<StorageGroup>(); private final GlobalBlockMap globalBlocks = new GlobalBlockMap(); private final MovedBlocks<StorageGroup> movedBlocks; /** Map (datanodeUuid,storageType -> StorageGroup) */ private final StorageGroupMap storageGroupMap = new StorageGroupMap(); private NetworkTopology cluster; private final ExecutorService moveExecutor; private final ExecutorService dispatchExecutor; /** The maximum number of concurrent blocks moves at a datanode */ private final int maxConcurrentMovesPerNode; private final AtomicLong bytesMoved = new AtomicLong(); private static class GlobalBlockMap { private final Map<Block, DBlock> map = new HashMap<Block, DBlock>(); /** * Get the block from the map; * if the block is not found, create a new block and put it in the map. */ private DBlock get(Block b) { DBlock block = map.get(b); if (block == null) { block = new DBlock(b); map.put(b, block); } return block; } /** Remove all blocks except for the moved blocks. 
*/ private void removeAllButRetain(MovedBlocks<StorageGroup> movedBlocks) { for (Iterator<Block> i = map.keySet().iterator(); i.hasNext();) { if (!movedBlocks.contains(i.next())) { i.remove(); } } } } static class StorageGroupMap { private static String toKey(String datanodeUuid, StorageType storageType) { return datanodeUuid + ":" + storageType; } private final Map<String, StorageGroup> map = new HashMap<String, StorageGroup>(); StorageGroup get(String datanodeUuid, StorageType storageType) { return map.get(toKey(datanodeUuid, storageType)); } void put(StorageGroup g) { final String key = toKey(g.getDatanodeInfo().getDatanodeUuid(), g.storageType); final StorageGroup existing = map.put(key, g); Preconditions.checkState(existing == null); } int size() { return map.size(); } void clear() { map.clear(); } } /** This class keeps track of a scheduled block move */ private class PendingMove { private DBlock block; private Source source; private DDatanode proxySource; private StorageGroup target; private PendingMove() { } @Override public String toString() { final Block b = block.getBlock(); return b + " with size=" + b.getNumBytes() + " from " + source.getDisplayName() + " to " + target.getDisplayName() + " through " + proxySource.datanode; } /** * Choose a block & a proxy source for this pendingMove whose source & * target have already been chosen. * * @return true if a block and its proxy are chosen; false otherwise */ private boolean chooseBlockAndProxy() { // iterate all source's blocks until find a good one for (Iterator<DBlock> i = source.getBlockIterator(); i.hasNext();) { if (markMovedIfGoodBlock(i.next())) { i.remove(); return true; } } return false; } /** * @return true if the given block is good for the tentative move. 
*/ private boolean markMovedIfGoodBlock(DBlock block) { synchronized (block) { synchronized (movedBlocks) { if (isGoodBlockCandidate(source, target, block)) { this.block = block; if (chooseProxySource()) { movedBlocks.put(block); if (LOG.isDebugEnabled()) { LOG.debug("Decided to move " + this); } return true; } } } } return false; } /** * Choose a proxy source. * * @return true if a proxy is found; otherwise false */ private boolean chooseProxySource() { final DatanodeInfo targetDN = target.getDatanodeInfo(); // if node group is supported, first try add nodes in the same node group if (cluster.isNodeGroupAware()) { for (StorageGroup loc : block.getLocations()) { if (cluster.isOnSameNodeGroup(loc.getDatanodeInfo(), targetDN) && addTo(loc)) { return true; } } } // check if there is replica which is on the same rack with the target for (StorageGroup loc : block.getLocations()) { if (cluster.isOnSameRack(loc.getDatanodeInfo(), targetDN) && addTo(loc)) { return true; } } // find out a non-busy replica for (StorageGroup loc : block.getLocations()) { if (addTo(loc)) { return true; } } return false; } /** add to a proxy source for specific block movement */ private boolean addTo(StorageGroup g) { final DDatanode dn = g.getDDatanode(); if (dn.addPendingBlock(this)) { proxySource = dn; return true; } return false; } /** Dispatch the move to the proxy source & wait for the response. 
*/ private void dispatch() { if (LOG.isDebugEnabled()) { LOG.debug("Start moving " + this); } Socket sock = new Socket(); DataOutputStream out = null; DataInputStream in = null; try { sock.connect( NetUtils.createSocketAddr(target.getDatanodeInfo().getXferAddr()), HdfsServerConstants.READ_TIMEOUT); sock.setKeepAlive(true); OutputStream unbufOut = sock.getOutputStream(); InputStream unbufIn = sock.getInputStream(); ExtendedBlock eb = new ExtendedBlock(nnc.getBlockpoolID(), block.getBlock()); final KeyManager km = nnc.getKeyManager(); Token<BlockTokenIdentifier> accessToken = km.getAccessToken(eb); IOStreamPair saslStreams = saslClient.socketSend(sock, unbufOut, unbufIn, km, accessToken, target.getDatanodeInfo()); unbufOut = saslStreams.out; unbufIn = saslStreams.in; out = new DataOutputStream(new BufferedOutputStream(unbufOut, HdfsConstants.IO_FILE_BUFFER_SIZE)); in = new DataInputStream(new BufferedInputStream(unbufIn, HdfsConstants.IO_FILE_BUFFER_SIZE)); sendRequest(out, eb, accessToken); receiveResponse(in); bytesMoved.addAndGet(block.getNumBytes()); LOG.info("Successfully moved " + this); } catch (IOException e) { LOG.warn("Failed to move " + this + ": " + e.getMessage()); // Proxy or target may have some issues, delay before using these nodes // further in order to avoid a potential storm of "threads quota // exceeded" warnings when the dispatcher gets out of sync with work // going on in datanodes. 
proxySource.activateDelay(DELAY_AFTER_ERROR); target.getDDatanode().activateDelay(DELAY_AFTER_ERROR); } finally { IOUtils.closeStream(out); IOUtils.closeStream(in); IOUtils.closeSocket(sock); proxySource.removePendingBlock(this); target.getDDatanode().removePendingBlock(this); synchronized (this) { reset(); } synchronized (Dispatcher.this) { Dispatcher.this.notifyAll(); } } } /** Send a block replace request to the output stream */ private void sendRequest(DataOutputStream out, ExtendedBlock eb, Token<BlockTokenIdentifier> accessToken) throws IOException { new Sender(out).replaceBlock(eb, target.storageType, accessToken, source.getDatanodeInfo().getDatanodeUuid(), proxySource.datanode); } /** Receive a block copy response from the input stream */ private void receiveResponse(DataInputStream in) throws IOException { BlockOpResponseProto response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); while (response.getStatus() == Status.IN_PROGRESS) { // read intermediate responses response = BlockOpResponseProto.parseFrom(vintPrefixed(in)); } if (response.getStatus() != Status.SUCCESS) { if (response.getStatus() == Status.ERROR_ACCESS_TOKEN) { throw new IOException("block move failed due to access token error"); } throw new IOException("block move is failed: " + response.getMessage()); } } /** reset the object */ private void reset() { block = null; source = null; proxySource = null; target = null; } } /** A class for keeping track of block locations in the dispatcher. */ private static class DBlock extends MovedBlocks.Locations<StorageGroup> { DBlock(Block block) { super(block); } } /** The class represents a desired move. */ static class Task { private final StorageGroup target; private long size; // bytes scheduled to move Task(StorageGroup target, long size) { this.target = target; this.size = size; } long getSize() { return size; } } /** A class that keeps track of a datanode. 
*/
static class DDatanode {

  /** A group of storages in a datanode with the same storage type. */
  class StorageGroup {
    final StorageType storageType;
    // Upper bound (in bytes) on what may ever be scheduled onto this group.
    final long maxSize2Move;
    // Bytes currently promised to scheduled moves; accessed only through
    // the synchronized methods below (locks the StorageGroup instance).
    private long scheduledSize = 0L;

    private StorageGroup(StorageType storageType, long maxSize2Move) {
      this.storageType = storageType;
      this.maxSize2Move = maxSize2Move;
    }

    // The enclosing datanode this storage group belongs to.
    private DDatanode getDDatanode() {
      return DDatanode.this;
    }

    DatanodeInfo getDatanodeInfo() {
      return DDatanode.this.datanode;
    }

    /** Decide if still need to move more bytes */
    synchronized boolean hasSpaceForScheduling() {
      return availableSizeToMove() > 0L;
    }

    /** @return the total number of bytes that need to be moved */
    synchronized long availableSizeToMove() {
      return maxSize2Move - scheduledSize;
    }

    /** increment scheduled size (callers may pass a negative delta) */
    synchronized void incScheduledSize(long size) {
      scheduledSize += size;
    }

    /** @return scheduled size */
    synchronized long getScheduledSize() {
      return scheduledSize;
    }

    /** Reset scheduled size to zero. */
    synchronized void resetScheduledSize() {
      scheduledSize = 0L;
    }

    /** @return the name for display */
    String getDisplayName() {
      return datanode + ":" + storageType;
    }

    @Override
    public String toString() {
      return getDisplayName();
    }
  }

  final DatanodeInfo datanode;
  // One StorageGroup per storage type present on this datanode.
  final EnumMap<StorageType, StorageGroup> storageMap
      = new EnumMap<StorageType, StorageGroup>(StorageType.class);
  // Monotonic-clock deadline; while it is in the future no new moves are
  // scheduled on this node (set via activateDelay after a transfer error).
  protected long delayUntil = 0L;
  /** blocks being moved but not confirmed yet */
  private final List<PendingMove> pendings;
  private final int maxConcurrentMoves;

  @Override
  public String toString() {
    return getClass().getSimpleName() + ":" + datanode + ":" + storageMap.values();
  }

  private DDatanode(DatanodeStorageReport r, int maxConcurrentMoves) {
    this.datanode = r.getDatanodeInfo();
    this.maxConcurrentMoves = maxConcurrentMoves;
    this.pendings = new ArrayList<PendingMove>(maxConcurrentMoves);
  }

  // Register a storage group; a datanode may hold at most one group per
  // storage type, enforced by the checkState below.
  private void put(StorageType storageType, StorageGroup g) {
    final StorageGroup existing = storageMap.put(storageType, g);
    Preconditions.checkState(existing == null);
  }

  StorageGroup addStorageGroup(StorageType storageType, long maxSize2Move) {
    final StorageGroup g = new StorageGroup(storageType, maxSize2Move);
    put(storageType, g);
    return g;
  }

  // A Source is a StorageGroup that can also originate moves; it is an
  // inner class of Dispatcher, hence the qualified d.new instantiation.
  Source addSource(StorageType storageType, long maxSize2Move, Dispatcher d) {
    final Source s = d.new Source(storageType, maxSize2Move, this);
    put(storageType, s);
    return s;
  }

  synchronized private void activateDelay(long delta) {
    delayUntil = Time.monotonicNow() + delta;
  }

  // Returns true while the post-error delay is in effect; clears the
  // deadline lazily once it has passed.
  synchronized private boolean isDelayActive() {
    if (delayUntil == 0 || Time.monotonicNow() > delayUntil) {
      delayUntil = 0;
      return false;
    }
    return true;
  }

  /** Check if the node can schedule more blocks to move */
  synchronized boolean isPendingQNotFull() {
    return pendings.size() < maxConcurrentMoves;
  }

  /** Check if all the dispatched moves are done */
  synchronized boolean isPendingQEmpty() {
    return pendings.isEmpty();
  }

  /**
   * Add a scheduled block move to the node.
   * @return false when the node is in its error-delay window or its
   *         pending queue is already at maxConcurrentMoves.
   */
  synchronized boolean addPendingBlock(PendingMove pendingBlock) {
    if (!isDelayActive() && isPendingQNotFull()) {
      return pendings.add(pendingBlock);
    }
    return false;
  }

  /** Remove a scheduled block move from the node */
  synchronized boolean removePendingBlock(PendingMove pendingBlock) {
    return pendings.remove(pendingBlock);
  }
}

/** A node that can be the sources of a block move */
class Source extends DDatanode.StorageGroup {

  // Moves this source still wants to perform (typically few; presized 2).
  private final List<Task> tasks = new ArrayList<Task>(2);
  // Budget of bytes still to fetch from the namenode this iteration;
  // set in dispatchBlocks, decremented by getBlockList.
  private long blocksToReceive = 0L;
  /**
   * Source blocks point to the objects in {@link Dispatcher#globalBlocks}
   * because we want to keep one copy of a block and be aware that the
   * locations are changing over time.
*/
private final List<DBlock> srcBlocks = new ArrayList<DBlock>();

private Source(StorageType storageType, long maxSize2Move, DDatanode dn) {
  // Qualified superclass constructor: StorageGroup is an inner class of
  // DDatanode, so the enclosing instance must be supplied explicitly.
  dn.super(storageType, maxSize2Move);
}

/** Add a task */
void addTask(Task task) {
  Preconditions.checkState(task.target != this,
      "Source and target are the same storage group " + getDisplayName());
  incScheduledSize(task.size);
  tasks.add(task);
}

/** @return an iterator to this source's blocks */
Iterator<DBlock> getBlockIterator() {
  return srcBlocks.iterator();
}

/**
 * Fetch new blocks of this source from namenode and update this source's
 * block list & {@link Dispatcher#globalBlocks}.
 *
 * Lock order: globalBlocks first, then the individual DBlock — keep this
 * consistent everywhere DBlock locations are touched.
 *
 * @return the total size of the received blocks in the number of bytes.
 */
private long getBlockList() throws IOException {
  // Never ask the namenode for more than the per-call cap.
  final long size = Math.min(MAX_BLOCKS_SIZE_TO_FETCH, blocksToReceive);
  final BlocksWithLocations newBlocks = nnc.getBlocks(getDatanodeInfo(), size);

  long bytesReceived = 0;
  for (BlockWithLocations blk : newBlocks.getBlocks()) {
    bytesReceived += blk.getBlock().getNumBytes();
    synchronized (globalBlocks) {
      final DBlock block = globalBlocks.get(blk.getBlock());
      synchronized (block) {
        block.clearLocations();

        // update locations from the freshly fetched report
        final String[] datanodeUuids = blk.getDatanodeUuids();
        final StorageType[] storageTypes = blk.getStorageTypes();
        for (int i = 0; i < datanodeUuids.length; i++) {
          final StorageGroup g = storageGroupMap.get(
              datanodeUuids[i], storageTypes[i]);
          if (g != null) { // not unknown
            block.addLocation(g);
          }
        }
      }
      if (!srcBlocks.contains(block) && isGoodBlockCandidate(block)) {
        // filter bad candidates
        srcBlocks.add(block);
      }
    }
  }
  return bytesReceived;
}

/** Decide if the given block is a good candidate to move or not */
private boolean isGoodBlockCandidate(DBlock block) {
  // Good for this source iff good for at least one of its move tasks.
  for (Task t : tasks) {
    if (Dispatcher.this.isGoodBlockCandidate(this, t.target, block)) {
      return true;
    }
  }
  return false;
}

/**
 * Choose a move for the source. The block's source, target, and proxy
 * are determined too. When choosing proxy and target, source &
 * target throttling has been considered. They are chosen only when they
 * have the capacity to support this block move. The block should be
 * dispatched immediately after this method is returned.
 *
 * @return a move that's good for the source to dispatch immediately.
 */
private PendingMove chooseNextMove() {
  for (Iterator<Task> i = tasks.iterator(); i.hasNext();) {
    final Task task = i.next();
    final DDatanode target = task.target.getDDatanode();
    PendingMove pendingBlock = new PendingMove();
    if (target.addPendingBlock(pendingBlock)) {
      // target is not busy, so do a tentative block allocation
      pendingBlock.source = this;
      pendingBlock.target = task.target;
      // chooseBlockAndProxy is defined outside this chunk; presumably it
      // picks a block and a proxy datanode — confirm against full source.
      if (pendingBlock.chooseBlockAndProxy()) {
        long blockSize = pendingBlock.block.getNumBytes();
        incScheduledSize(-blockSize);
        task.size -= blockSize;
        if (task.size == 0) {
          i.remove();
        }
        return pendingBlock;
      } else {
        // cancel the tentative move: roll back the pending-queue slot
        target.removePendingBlock(pendingBlock);
      }
    }
  }
  return null;
}

/** Iterate all source's blocks to remove moved ones */
private void removeMovedBlocks() {
  for (Iterator<DBlock> i = getBlockIterator(); i.hasNext();) {
    if (movedBlocks.contains(i.next().getBlock())) {
      i.remove();
    }
  }
}

private static final int SOURCE_BLOCKS_MIN_SIZE = 5;

/** @return if should fetch more blocks from namenode */
private boolean shouldFetchMoreBlocks() {
  return srcBlocks.size() < SOURCE_BLOCKS_MIN_SIZE && blocksToReceive > 0;
}

private static final long MAX_ITERATION_TIME = 20 * 60 * 1000L; // 20 mins

/**
 * This method iteratively does the following: it first selects a block to
 * move, then sends a request to the proxy source to start the block move
 * when the source's block list falls below a threshold, it asks the
 * namenode for more blocks. It terminates when it has dispatch enough block
 * move tasks or it has received enough blocks from the namenode, or the
 * elapsed time of the iteration has exceeded the max time limit.
*/
private void dispatchBlocks() {
  final long startTime = Time.monotonicNow();
  // Fetch up to twice the scheduled byte count, since not every fetched
  // block will turn out to be a dispatchable candidate.
  this.blocksToReceive = 2 * getScheduledSize();
  boolean isTimeUp = false;
  int noPendingMoveIteration = 0;
  while (!isTimeUp && getScheduledSize() > 0
      && (!srcBlocks.isEmpty() || blocksToReceive > 0)) {
    final PendingMove p = chooseNextMove();
    if (p != null) {
      // move the block on the shared mover pool; completion is signalled
      // back via Dispatcher.this.notifyAll() in PendingMove#dispatch.
      moveExecutor.execute(new Runnable() {
        @Override
        public void run() {
          p.dispatch();
        }
      });
      continue;
    }

    // Since we cannot schedule any block to move,
    // remove any moved blocks from the source block list and
    removeMovedBlocks(); // filter already moved blocks
    // check if we should fetch more blocks from the namenode
    if (shouldFetchMoreBlocks()) {
      // fetch new blocks
      try {
        blocksToReceive -= getBlockList();
        continue;
      } catch (IOException e) {
        LOG.warn("Exception while getting block list", e);
        return;
      }
    } else {
      // source node cannot find a pending block to move, iteration +1
      noPendingMoveIteration++;
      // in case no blocks can be moved for source node's task,
      // jump out of while-loop after 5 iterations.
      if (noPendingMoveIteration >= MAX_NO_PENDING_MOVE_ITERATIONS) {
        // zeroing the scheduled size makes the loop condition fail
        resetScheduledSize();
      }
    }

    // check if time is up or not
    if (Time.monotonicNow() - startTime > MAX_ITERATION_TIME) {
      isTimeUp = true;
      continue;
    }

    // Now we can not schedule any block to move and there are
    // no new blocks added to the source block list, so we wait.
    try {
      synchronized (Dispatcher.this) {
        Dispatcher.this.wait(1000); // wait for targets/sources to be idle
      }
    } catch (InterruptedException ignored) {
      // NOTE(review): interrupt status is not restored here — confirm this
      // best-effort wait is intentionally interrupt-transparent.
    }
  }
}
}

public Dispatcher(NameNodeConnector nnc, Set<String> includedNodes,
    Set<String> excludedNodes, long movedWinWidth, int moverThreads,
    int dispatcherThreads, int maxConcurrentMovesPerNode, Configuration conf) {
  this.nnc = nnc;
  this.excludedNodes = excludedNodes;
  this.includedNodes = includedNodes;
  // Sliding window of recently moved blocks, to avoid re-moving them.
  this.movedBlocks = new MovedBlocks<StorageGroup>(movedWinWidth);

  this.cluster = NetworkTopology.getInstance(conf);

  // Separate pools: moveExecutor performs the actual transfers,
  // dispatchExecutor runs one dispatchBlocks loop per source.
  this.moveExecutor = Executors.newFixedThreadPool(moverThreads);
  this.dispatchExecutor = Executors.newFixedThreadPool(dispatcherThreads);
  this.maxConcurrentMovesPerNode = maxConcurrentMovesPerNode;

  final boolean fallbackToSimpleAuthAllowed = conf.getBoolean(
      CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY,
      CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT);
  this.saslClient = new SaslDataTransferClient(
      DataTransferSaslUtil.getSaslPropertiesResolver(conf),
      TrustedChannelResolver.getInstance(conf), fallbackToSimpleAuthAllowed);
}

StorageGroupMap getStorageGroupMap() {
  return storageGroupMap;
}

NetworkTopology getCluster() {
  return cluster;
}

long getBytesMoved() {
  return bytesMoved.get();
}

// Total bytes still scheduled across all sources; sanity-checks that every
// source and target has been registered in storageGroupMap.
long bytesToMove() {
  Preconditions.checkState(
      storageGroupMap.size() >= sources.size() + targets.size(),
      "Mismatched number of storage groups (" + storageGroupMap.size()
          + " < " + sources.size() + " sources + " + targets.size()
          + " targets)");

  long b = 0L;
  for (Source src : sources) {
    b += src.getScheduledSize();
  }
  return b;
}

void add(Source source, StorageGroup target) {
  sources.add(source);
  targets.add(target);
}

// A datanode is skipped when it is (de)commissioning, decommissioned,
// explicitly excluded, or absent from a non-empty include list.
private boolean shouldIgnore(DatanodeInfo dn) {
  // ignore decommissioned nodes
  final boolean decommissioned = dn.isDecommissioned();
  // ignore decommissioning nodes
  final boolean decommissioning = dn.isDecommissionInProgress();
  // ignore nodes in exclude list
  final boolean excluded = Util.isExcluded(excludedNodes, dn);
  // ignore nodes not in the include list (if include list is not empty)
  final boolean notIncluded = !Util.isIncluded(includedNodes, dn);

  if (decommissioned || decommissioning || excluded || notIncluded) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Excluding datanode " + dn + ": " + decommissioned + ", "
          + decommissioning + ", " + excluded + ", " + notIncluded);
    }
    return true;
  }
  return false;
}

/** Get live datanode storage reports and then build the network topology. */
List<DatanodeStorageReport> init() throws IOException {
  final DatanodeStorageReport[] reports = nnc.getLiveDatanodeStorageReport();
  final List<DatanodeStorageReport> trimmed = new ArrayList<DatanodeStorageReport>();
  // create network topology and classify utilization collections:
  // over-utilized, above-average, below-average and under-utilized.
  // Shuffling randomizes iteration order so no datanode is favored.
  for (DatanodeStorageReport r : DFSUtil.shuffle(reports)) {
    final DatanodeInfo datanode = r.getDatanodeInfo();
    if (shouldIgnore(datanode)) {
      continue;
    }
    trimmed.add(r);
    cluster.add(datanode);
  }
  return trimmed;
}

public DDatanode newDatanode(DatanodeStorageReport r) {
  return new DDatanode(r, maxConcurrentMovesPerNode);
}

public boolean dispatchAndCheckContinue() throws InterruptedException {
  return nnc.shouldContinue(dispatchBlockMoves());
}

/**
 * Dispatch block moves for each source. The thread selects blocks to move &
 * sends request to proxy source to initiate block move. The process is flow
 * controlled. Block selection is blocked if there are too many un-confirmed
 * block moves.
 *
 * @return the total number of bytes successfully moved in this iteration.
 */
private long dispatchBlockMoves() throws InterruptedException {
  final long bytesLastMoved = bytesMoved.get();
  final Future<?>[] futures = new Future<?>[sources.size()];
  final Iterator<Source> i = sources.iterator();
  // One dispatcher task per source; each runs its own dispatchBlocks loop.
  for (int j = 0; j < futures.length; j++) {
    final Source s = i.next();
    futures[j] = dispatchExecutor.submit(new Runnable() {
      @Override
      public void run() {
        s.dispatchBlocks();
      }
    });
  }

  // wait for all dispatcher threads to finish
  for (Future<?> future : futures) {
    try {
      future.get();
    } catch (ExecutionException e) {
      // log the underlying cause, not the wrapper
      LOG.warn("Dispatcher thread failed", e.getCause());
    }
  }

  // wait for all block moving to be done
  waitForMoveCompletion();

  return bytesMoved.get() - bytesLastMoved;
}

/** The sleeping period before checking if block move is completed again */
static private long blockMoveWaitTime = 30000L;

/** set the sleeping period for block move completion check */
static void setBlockMoveWaitTime(long time) {
  blockMoveWaitTime = time;
}

/** Wait for all block move confirmations. */
private void waitForMoveCompletion() {
  for(;;) {
    boolean empty = true;
    for (StorageGroup t : targets) {
      if (!t.getDDatanode().isPendingQEmpty()) {
        empty = false;
        break;
      }
    }
    if (empty) {
      return; //all pending queues are empty
    }
    try {
      Thread.sleep(blockMoveWaitTime);
    } catch (InterruptedException ignored) {
      // NOTE(review): interrupt status not restored — verify callers never
      // rely on interruption to cancel this polling loop.
    }
  }
}

/**
 * Decide if the block is a good candidate to be moved from source to target.
 * A block is a good candidate if
 * 1. the block is not in the process of being moved/has not been moved;
 * 2. the block does not have a replica on the target;
 * 3.
doing the move does not reduce the number of racks that the block has
*/
private boolean isGoodBlockCandidate(Source source, StorageGroup target,
    DBlock block) {
  // A move is only meaningful within the same storage type.
  if (source.storageType != target.storageType) {
    return false;
  }
  // check if the block is moved or not
  if (movedBlocks.contains(block.getBlock())) {
    return false;
  }
  if (block.isLocatedOn(target)) {
    return false;
  }
  if (cluster.isNodeGroupAware()
      && isOnSameNodeGroupWithReplicas(target, block, source)) {
    return false;
  }
  if (reduceNumOfRacks(source, target, block)) {
    return false;
  }
  return true;
}

/**
 * Determine whether moving the given block replica from source to target
 * would reduce the number of racks of the block replicas.
 */
private boolean reduceNumOfRacks(Source source, StorageGroup target,
    DBlock block) {
  final DatanodeInfo sourceDn = source.getDatanodeInfo();
  if (cluster.isOnSameRack(sourceDn, target.getDatanodeInfo())) {
    // source and target are on the same rack
    return false;
  }
  boolean notOnSameRack = true;
  synchronized (block) {
    for (StorageGroup loc : block.getLocations()) {
      if (cluster.isOnSameRack(loc.getDatanodeInfo(),
          target.getDatanodeInfo())) {
        notOnSameRack = false;
        break;
      }
    }
  }
  if (notOnSameRack) {
    // target is not on the same rack as any replica
    return false;
  }
  // FIX: also hold the block lock for this second scan, consistent with
  // the loop above — Source#getBlockList mutates the location list under
  // synchronized (block), so an unlocked iteration could race with it.
  synchronized (block) {
    for (StorageGroup g : block.getLocations()) {
      if (g != source
          && cluster.isOnSameRack(g.getDatanodeInfo(), sourceDn)) {
        // source is on the same rack of another replica
        return false;
      }
    }
  }
  return true;
}

/**
 * Check if there are any replica (other than source) on the same node group
 * with target. If true, then target is not a good candidate for placing
 * specific replica as we don't want 2 replicas under the same nodegroup.
 *
 * @return true if there are any replica (other than source) on the same node
 *         group with target
 */
private boolean isOnSameNodeGroupWithReplicas(
    StorageGroup target, DBlock block, Source source) {
  final DatanodeInfo targetDn = target.getDatanodeInfo();
  // FIX: lock the block while iterating its locations, consistent with
  // reduceNumOfRacks and Source#getBlockList (same lock, same order).
  synchronized (block) {
    for (StorageGroup g : block.getLocations()) {
      if (g != source
          && cluster.isOnSameNodeGroup(g.getDatanodeInfo(), targetDn)) {
        return true;
      }
    }
  }
  return false;
}

/** Reset all fields in order to prepare for the next iteration */
void reset(Configuration conf) {
  cluster = NetworkTopology.getInstance(conf);
  storageGroupMap.clear();
  sources.clear();
  targets.clear();
  // Keep only the entries still covered by the moved-blocks window.
  globalBlocks.removeAllButRetain(movedBlocks);
  movedBlocks.cleanup();
}

/** shutdown thread pools */
void shutdownNow() {
  dispatchExecutor.shutdownNow();
  moveExecutor.shutdownNow();
}

static class Util {

  /** @return true if data node is part of the excludedNodes. */
  static boolean isExcluded(Set<String> excludedNodes, DatanodeInfo dn) {
    return isIn(excludedNodes, dn);
  }

  /**
   * @return true if includedNodes is empty or data node is part of the
   *         includedNodes.
   */
  static boolean isIncluded(Set<String> includedNodes, DatanodeInfo dn) {
    return (includedNodes.isEmpty() || isIn(includedNodes, dn));
  }

  /**
   * Match is checked using host name , ip address with and without port
   * number.
   *
   * @return true if the datanode's transfer address matches the set of nodes.
   */
  private static boolean isIn(Set<String> datanodes, DatanodeInfo dn) {
    return isIn(datanodes, dn.getPeerHostName(), dn.getXferPort())
        || isIn(datanodes, dn.getIpAddr(), dn.getXferPort())
        || isIn(datanodes, dn.getHostName(), dn.getXferPort());
  }

  /** @return true if nodes contains host or host:port */
  private static boolean isIn(Set<String> nodes, String host, int port) {
    if (host == null) {
      return false;
    }
    return (nodes.contains(host) || nodes.contains(host + ":" + port));
  }

  /**
   * Parse a comma separated string to obtain set of host names
   *
   * @return set of host names
   */
  static Set<String> parseHostList(String string) {
    String[] addrs = StringUtils.getTrimmedStrings(string);
    return new HashSet<String>(Arrays.asList(addrs));
  }

  /**
   * Read set of host names from a file
   *
   * @return set of host names
   * @throws IllegalArgumentException (with the IOException as cause) if the
   *         file cannot be read
   */
  static Set<String> getHostListFromFile(String fileName, String type) {
    Set<String> nodes = new HashSet<String>();
    try {
      HostsFileReader.readFileToSet(type, fileName, nodes);
      return StringUtils.getTrimmedStrings(nodes);
    } catch (IOException e) {
      // FIX: chain the cause so callers can diagnose why reading failed.
      throw new IllegalArgumentException(
          "Failed to read host list from file: " + fileName, e);
    }
  }
}
}
googleapis/google-cloud-java
35,032
java-alloydb/proto-google-cloud-alloydb-v1alpha/src/main/java/com/google/cloud/alloydb/v1alpha/EncryptionInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1alpha/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1alpha; /** * * * <pre> * EncryptionInfo describes the encryption information of a cluster or a backup. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1alpha.EncryptionInfo} */ public final class EncryptionInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1alpha.EncryptionInfo) EncryptionInfoOrBuilder { private static final long serialVersionUID = 0L; // Use EncryptionInfo.newBuilder() to construct. 
private EncryptionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EncryptionInfo() { encryptionType_ = 0; kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new EncryptionInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1alpha.ResourcesProto .internal_static_google_cloud_alloydb_v1alpha_EncryptionInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1alpha.ResourcesProto .internal_static_google_cloud_alloydb_v1alpha_EncryptionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1alpha.EncryptionInfo.class, com.google.cloud.alloydb.v1alpha.EncryptionInfo.Builder.class); } /** * * * <pre> * Possible encryption types. * </pre> * * Protobuf enum {@code google.cloud.alloydb.v1alpha.EncryptionInfo.Type} */ public enum Type implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Encryption type not specified. Defaults to GOOGLE_DEFAULT_ENCRYPTION. * </pre> * * <code>TYPE_UNSPECIFIED = 0;</code> */ TYPE_UNSPECIFIED(0), /** * * * <pre> * The data is encrypted at rest with a key that is fully managed by Google. * No key version will be populated. This is the default state. * </pre> * * <code>GOOGLE_DEFAULT_ENCRYPTION = 1;</code> */ GOOGLE_DEFAULT_ENCRYPTION(1), /** * * * <pre> * The data is encrypted at rest with a key that is managed by the customer. * KMS key versions will be populated. * </pre> * * <code>CUSTOMER_MANAGED_ENCRYPTION = 2;</code> */ CUSTOMER_MANAGED_ENCRYPTION(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Encryption type not specified. Defaults to GOOGLE_DEFAULT_ENCRYPTION. 
* </pre> * * <code>TYPE_UNSPECIFIED = 0;</code> */ public static final int TYPE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The data is encrypted at rest with a key that is fully managed by Google. * No key version will be populated. This is the default state. * </pre> * * <code>GOOGLE_DEFAULT_ENCRYPTION = 1;</code> */ public static final int GOOGLE_DEFAULT_ENCRYPTION_VALUE = 1; /** * * * <pre> * The data is encrypted at rest with a key that is managed by the customer. * KMS key versions will be populated. * </pre> * * <code>CUSTOMER_MANAGED_ENCRYPTION = 2;</code> */ public static final int CUSTOMER_MANAGED_ENCRYPTION_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Type valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static Type forNumber(int value) { switch (value) { case 0: return TYPE_UNSPECIFIED; case 1: return GOOGLE_DEFAULT_ENCRYPTION; case 2: return CUSTOMER_MANAGED_ENCRYPTION; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Type>() { public Type findValueByNumber(int number) { return Type.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.alloydb.v1alpha.EncryptionInfo.getDescriptor().getEnumTypes().get(0); } private static final Type[] VALUES = values(); public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Type(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1alpha.EncryptionInfo.Type) } public static final int ENCRYPTION_TYPE_FIELD_NUMBER = 1; private int encryptionType_ = 0; /** * * * <pre> * Output only. Type of encryption. * </pre> * * <code> * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for encryptionType. 
*/ @java.lang.Override public int getEncryptionTypeValue() { return encryptionType_; } /** * * * <pre> * Output only. Type of encryption. * </pre> * * <code> * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The encryptionType. */ @java.lang.Override public com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type getEncryptionType() { com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type result = com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.forNumber(encryptionType_); return result == null ? com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.UNRECOGNIZED : result; } public static final int KMS_KEY_VERSIONS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Output only. Cloud KMS key versions that are being used to protect the * database or the backup. * </pre> * * <code> * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the kmsKeyVersions. */ public com.google.protobuf.ProtocolStringList getKmsKeyVersionsList() { return kmsKeyVersions_; } /** * * * <pre> * Output only. Cloud KMS key versions that are being used to protect the * database or the backup. * </pre> * * <code> * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The count of kmsKeyVersions. */ public int getKmsKeyVersionsCount() { return kmsKeyVersions_.size(); } /** * * * <pre> * Output only. Cloud KMS key versions that are being used to protect the * database or the backup. * </pre> * * <code> * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... 
} * </code> * * @param index The index of the element to return. * @return The kmsKeyVersions at the given index. */ public java.lang.String getKmsKeyVersions(int index) { return kmsKeyVersions_.get(index); } /** * * * <pre> * Output only. Cloud KMS key versions that are being used to protect the * database or the backup. * </pre> * * <code> * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the kmsKeyVersions at the given index. */ public com.google.protobuf.ByteString getKmsKeyVersionsBytes(int index) { return kmsKeyVersions_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (encryptionType_ != com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.TYPE_UNSPECIFIED.getNumber()) { output.writeEnum(1, encryptionType_); } for (int i = 0; i < kmsKeyVersions_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, kmsKeyVersions_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (encryptionType_ != com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.TYPE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, encryptionType_); } { int dataSize = 0; for (int i = 0; i < kmsKeyVersions_.size(); i++) { dataSize += computeStringSizeNoTag(kmsKeyVersions_.getRaw(i)); } size += dataSize; size += 1 * getKmsKeyVersionsList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = 
size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1alpha.EncryptionInfo)) { return super.equals(obj); } com.google.cloud.alloydb.v1alpha.EncryptionInfo other = (com.google.cloud.alloydb.v1alpha.EncryptionInfo) obj; if (encryptionType_ != other.encryptionType_) return false; if (!getKmsKeyVersionsList().equals(other.getKmsKeyVersionsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ENCRYPTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + encryptionType_; if (getKmsKeyVersionsCount() > 0) { hash = (37 * hash) + KMS_KEY_VERSIONS_FIELD_NUMBER; hash = (53 * hash) + getKmsKeyVersionsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point markers
// below). Do not edit by hand — regenerate from the .proto definition instead.
// The first two lines below complete a parseFrom overload whose start lies
// above this chunk.
        extensionRegistry);
  }

  // ----- Static parsing entry points (delegate to PARSER) -----

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // ----- Builder factory methods -----

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.alloydb.v1alpha.EncryptionInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * EncryptionInfo describes the encryption information of a cluster or a backup.
   * </pre>
   *
   * Protobuf type {@code google.cloud.alloydb.v1alpha.EncryptionInfo}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1alpha.EncryptionInfo)
      com.google.cloud.alloydb.v1alpha.EncryptionInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.alloydb.v1alpha.ResourcesProto
          .internal_static_google_cloud_alloydb_v1alpha_EncryptionInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.alloydb.v1alpha.ResourcesProto
          .internal_static_google_cloud_alloydb_v1alpha_EncryptionInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.alloydb.v1alpha.EncryptionInfo.class,
              com.google.cloud.alloydb.v1alpha.EncryptionInfo.Builder.class);
    }

    // Construct using com.google.cloud.alloydb.v1alpha.EncryptionInfo.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      encryptionType_ = 0;
      kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.alloydb.v1alpha.ResourcesProto
          .internal_static_google_cloud_alloydb_v1alpha_EncryptionInfo_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1alpha.EncryptionInfo getDefaultInstanceForType() {
      return com.google.cloud.alloydb.v1alpha.EncryptionInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1alpha.EncryptionInfo build() {
      com.google.cloud.alloydb.v1alpha.EncryptionInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1alpha.EncryptionInfo buildPartial() {
      com.google.cloud.alloydb.v1alpha.EncryptionInfo result =
          new com.google.cloud.alloydb.v1alpha.EncryptionInfo(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set into the new message.
    private void buildPartial0(com.google.cloud.alloydb.v1alpha.EncryptionInfo result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.encryptionType_ = encryptionType_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        kmsKeyVersions_.makeImmutable();
        result.kmsKeyVersions_ = kmsKeyVersions_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.alloydb.v1alpha.EncryptionInfo) {
        return mergeFrom((com.google.cloud.alloydb.v1alpha.EncryptionInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.alloydb.v1alpha.EncryptionInfo other) {
      if (other == com.google.cloud.alloydb.v1alpha.EncryptionInfo.getDefaultInstance())
        return this;
      if (other.encryptionType_ != 0) {
        setEncryptionTypeValue(other.getEncryptionTypeValue());
      }
      if (!other.kmsKeyVersions_.isEmpty()) {
        if (kmsKeyVersions_.isEmpty()) {
          kmsKeyVersions_ = other.kmsKeyVersions_;
          bitField0_ |= 0x00000002;
        } else {
          ensureKmsKeyVersionsIsMutable();
          kmsKeyVersions_.addAll(other.kmsKeyVersions_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                encryptionType_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                ensureKmsKeyVersionsIsMutable();
                kmsKeyVersions_.add(s);
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int encryptionType_ = 0;

    /**
     *
     *
     * <pre>
     * Output only. Type of encryption.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for encryptionType.
     */
    @java.lang.Override
    public int getEncryptionTypeValue() {
      return encryptionType_;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of encryption.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for encryptionType to set.
     * @return This builder for chaining.
     */
    public Builder setEncryptionTypeValue(int value) {
      encryptionType_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of encryption.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The encryptionType.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type getEncryptionType() {
      com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type result =
          com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.forNumber(encryptionType_);
      return result == null
          ? com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of encryption.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The encryptionType to set.
     * @return This builder for chaining.
     */
    public Builder setEncryptionType(com.google.cloud.alloydb.v1alpha.EncryptionInfo.Type value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      encryptionType_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of encryption.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1alpha.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearEncryptionType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      encryptionType_ = 0;
      onChanged();
      return this;
    }

    private com.google.protobuf.LazyStringArrayList kmsKeyVersions_ =
        com.google.protobuf.LazyStringArrayList.emptyList();

    // Copy-on-write: replaces the shared immutable list with a mutable copy
    // before the first in-place modification.
    private void ensureKmsKeyVersionsIsMutable() {
      if (!kmsKeyVersions_.isModifiable()) {
        kmsKeyVersions_ = new com.google.protobuf.LazyStringArrayList(kmsKeyVersions_);
      }
      bitField0_ |= 0x00000002;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return A list containing the kmsKeyVersions.
     */
    public com.google.protobuf.ProtocolStringList getKmsKeyVersionsList() {
      kmsKeyVersions_.makeImmutable();
      return kmsKeyVersions_;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The count of kmsKeyVersions.
     */
    public int getKmsKeyVersionsCount() {
      return kmsKeyVersions_.size();
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param index The index of the element to return.
     * @return The kmsKeyVersions at the given index.
     */
    public java.lang.String getKmsKeyVersions(int index) {
      return kmsKeyVersions_.get(index);
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the kmsKeyVersions at the given index.
     */
    public com.google.protobuf.ByteString getKmsKeyVersionsBytes(int index) {
      return kmsKeyVersions_.getByteString(index);
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The kmsKeyVersions to set.
     * @return This builder for chaining.
     */
    public Builder setKmsKeyVersions(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureKmsKeyVersionsIsMutable();
      kmsKeyVersions_.set(index, value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The kmsKeyVersions to add.
     * @return This builder for chaining.
     */
    public Builder addKmsKeyVersions(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureKmsKeyVersionsIsMutable();
      kmsKeyVersions_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param values The kmsKeyVersions to add.
     * @return This builder for chaining.
     */
    public Builder addAllKmsKeyVersions(java.lang.Iterable<java.lang.String> values) {
      ensureKmsKeyVersionsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, kmsKeyVersions_);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKmsKeyVersions() {
      kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      ; // stray empty statement emitted by protoc; harmless
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Cloud KMS key versions that are being used to protect the
     * database or the backup.
     * </pre>
     *
     * <code>
     * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes of the kmsKeyVersions to add.
     * @return This builder for chaining.
     */
    public Builder addKmsKeyVersionsBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureKmsKeyVersionsIsMutable();
      kmsKeyVersions_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1alpha.EncryptionInfo)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1alpha.EncryptionInfo)
  private static final com.google.cloud.alloydb.v1alpha.EncryptionInfo DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1alpha.EncryptionInfo();
  }

  public static com.google.cloud.alloydb.v1alpha.EncryptionInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<EncryptionInfo> PARSER =
      new com.google.protobuf.AbstractParser<EncryptionInfo>() {
        @java.lang.Override
        public EncryptionInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<EncryptionInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<EncryptionInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1alpha.EncryptionInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/hop
35,034
plugins/misc/git/src/main/java/org/apache/hop/git/model/UIGit.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hop.git.model; import com.google.common.annotations.VisibleForTesting; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.StringUtils; import org.apache.hop.core.Const; import org.apache.hop.core.exception.HopException; import org.apache.hop.core.exception.HopFileException; import org.apache.hop.core.logging.LogChannel; import org.apache.hop.core.vfs.HopVfs; import org.apache.hop.git.model.revision.GitObjectRevision; import org.apache.hop.git.model.revision.ObjectRevision; import org.apache.hop.i18n.BaseMessages; import org.apache.hop.ui.core.dialog.EnterSelectionDialog; import org.apache.hop.ui.core.dialog.ErrorDialog; import org.apache.hop.ui.hopgui.HopGui; import org.eclipse.jgit.api.CloneCommand; import org.eclipse.jgit.api.DiffCommand; import 
org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.ListBranchCommand.ListMode; import org.eclipse.jgit.api.LogCommand; import org.eclipse.jgit.api.MergeResult; import org.eclipse.jgit.api.MergeResult.MergeStatus; import org.eclipse.jgit.api.PushCommand; import org.eclipse.jgit.api.RemoteAddCommand; import org.eclipse.jgit.api.RemoteRemoveCommand; import org.eclipse.jgit.api.ResetCommand.ResetType; import org.eclipse.jgit.api.RevertCommand; import org.eclipse.jgit.api.Status; import org.eclipse.jgit.api.StatusCommand; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.TransportException; import org.eclipse.jgit.diff.DiffEntry; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.diff.RenameDetector; import org.eclipse.jgit.dircache.DirCacheIterator; import org.eclipse.jgit.errors.CorruptObjectException; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.NoWorkTreeException; import org.eclipse.jgit.errors.RevisionSyntaxException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.RepositoryState; import org.eclipse.jgit.lib.StoredConfig; import org.eclipse.jgit.lib.UserConfig; import org.eclipse.jgit.merge.MergeStrategy; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevTree; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.transport.CredentialsProvider; import org.eclipse.jgit.transport.HttpTransport; import org.eclipse.jgit.transport.PushResult; import org.eclipse.jgit.transport.RefSpec; import 
org.eclipse.jgit.transport.RemoteConfig; import org.eclipse.jgit.transport.RemoteRefUpdate; import org.eclipse.jgit.transport.SshSessionFactory; import org.eclipse.jgit.transport.URIish; import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider; import org.eclipse.jgit.transport.http.apache.HttpClientConnectionFactory; import org.eclipse.jgit.transport.sshd.SshdSessionFactory; import org.eclipse.jgit.transport.sshd.SshdSessionFactoryBuilder; import org.eclipse.jgit.treewalk.AbstractTreeIterator; import org.eclipse.jgit.treewalk.CanonicalTreeParser; import org.eclipse.jgit.treewalk.EmptyTreeIterator; import org.eclipse.jgit.treewalk.FileTreeIterator; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.filter.PathFilter; import org.eclipse.jgit.treewalk.filter.TreeFilter; import org.eclipse.jgit.util.FileUtils; import org.eclipse.jgit.util.RawParseUtils; import org.eclipse.jgit.util.SystemReader; public class UIGit extends VCS { protected static final Class<?> PKG = UIGit.class; public static final String CONST_FOR_COMMIT_ID = "' for commit ID '"; public static final String CONST_OURS = ".ours"; public static final String CONST_THEIRS = ".theirs"; public static final String CONST_AUTHENTICATION_IS_REQUIRED_BUT_NO_CREDENTIALS_PROVIDER_HAS_BEEN_REGISTERED = "Authentication is required but no CredentialsProvider has been registered"; public static final String CONST_NOT_AUTHORIZED = "not authorized"; public static final String CONST_DIALOG_ERROR = "Dialog.Error"; static { /** * Use Apache HTTP Client instead of Sun HTTP client. This resolves the issue that Git commands * (e.g., push, clone) via http(s) do not work in EE. This issue is caused by the fact that weka * plugins (namely, knowledge-flow, weka-forecasting, and weka-scoring) calls * java.net.Authenticator.setDefault(). See here * https://bugs.eclipse.org/bugs/show_bug.cgi?id=296201 for more details. 
*/ HttpTransport.setConnectionFactory(new HttpClientConnectionFactory()); } private Git git; private CredentialsProvider credentialsProvider; /* (non-Javadoc) * @see org.apache.hop.git.spoon.model.VCS#getDirectory() */ public String getDirectory() { return directory; } @VisibleForTesting void setDirectory(String directory) { this.directory = directory; } @VisibleForTesting void setGit(Git git) { this.git = git; } public String getAuthorName(String commitId) { if (commitId.equals(VCS.WORKINGTREE)) { Config config = git.getRepository().getConfig(); return config.get(UserConfig.KEY).getAuthorName() + " <" + config.get(UserConfig.KEY).getAuthorEmail() + ">"; } else { RevCommit commit = resolve(commitId); PersonIdent author = commit.getAuthorIdent(); final StringBuilder r = new StringBuilder(); r.append(author.getName()); r.append(" <"); // r.append(author.getEmailAddress()); r.append(">"); // return r.toString(); } } public String getCommitMessage(String commitId) { if (commitId.equals(VCS.WORKINGTREE)) { try { String mergeMsg = git.getRepository().readMergeCommitMsg(); return mergeMsg == null ? 
"" : mergeMsg; } catch (Exception e) { return e.getMessage(); } } else { RevCommit commit = resolve(commitId); return commit.getFullMessage(); } } public String getCommitId(String revstr) { ObjectId id = null; try { id = git.getRepository().resolve(revstr); } catch (RevisionSyntaxException | IOException e) { e.printStackTrace(); } if (id == null) { return null; } else { return id.getName(); } } public String getParentCommitId(String revstr) { return getCommitId(revstr + "~"); } public String getBranch() { try { Ref head = git.getRepository().exactRef(Constants.HEAD); String branch = git.getRepository().getBranch(); if (head.getLeaf().getName().equals(Constants.HEAD)) { // if detached return Constants.HEAD + " detached at " + branch.substring(0, 7); } else { return branch; } } catch (Exception e) { return ""; } } public List<String> listBranches() { List<String> branches = new ArrayList<>(); try { List<Ref> branchListCall = git.branchList().call(); for (Ref ref : branchListCall) { branches.add(ref.getName()); } // sort the list of branches Collections.sort(branches); return branches; } catch (Exception e) { return Collections.emptyList(); } } public List<String> getLocalBranches() { return getBranches(null); } public List<String> getBranches() { return getBranches(ListMode.ALL); } /** * Get a list of branches based on mode * * @param mode * @return */ private List<String> getBranches(ListMode mode) { try { return git.branchList().setListMode(mode).call().stream() .filter(ref -> !ref.getName().endsWith(Constants.HEAD)) .map(ref -> Repository.shortenRefName(ref.getName())) .toList(); } catch (Exception e) { e.printStackTrace(); } return null; } public String getRemote() { try { StoredConfig config = git.getRepository().getConfig(); RemoteConfig remoteConfig = new RemoteConfig(config, Constants.DEFAULT_REMOTE_NAME); return remoteConfig.getURIs().iterator().next().toString(); } catch (Exception e) { return ""; } } public void addRemote(String value) { // Make sure you 
have only one URI for push removeRemote(); try { URIish uri = new URIish(value); RemoteAddCommand cmd = git.remoteAdd(); cmd.setName(Constants.DEFAULT_REMOTE_NAME); cmd.setUri(uri); cmd.call(); } catch (URISyntaxException e) { if (value.equals("")) { removeRemote(); } else { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } catch (GitAPIException e) { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } public void removeRemote() { RemoteRemoveCommand cmd = git.remoteRemove(); cmd.setRemoteName(Constants.DEFAULT_REMOTE_NAME); try { cmd.call(); } catch (GitAPIException e) { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } public boolean hasRemote() { StoredConfig config = git.getRepository().getConfig(); Set<String> remotes = config.getSubsections(ConfigConstants.CONFIG_REMOTE_SECTION); return remotes.contains(Constants.DEFAULT_REMOTE_NAME); } public boolean commit(String authorName, String message) throws HopException { PersonIdent author = RawParseUtils.parsePersonIdent(authorName); // Set the local time PersonIdent author2 = new PersonIdent( author.getName(), author.getEmailAddress(), SystemReader.getInstance().getCurrentTime(), SystemReader.getInstance().getTimezone(SystemReader.getInstance().getCurrentTime())); try { git.commit().setAuthor(author2).setMessage(message).call(); return true; } catch (Exception e) { throw new HopException("Error in git commit", e); } } public List<ObjectRevision> getRevisions() { return getRevisions(null); } public List<ObjectRevision> getRevisions(String path) { List<ObjectRevision> revisions = new ArrayList<>(); try { if (!isClean() || git.getRepository().getRepositoryState() == RepositoryState.MERGING_RESOLVED) { GitObjectRevision rev = new GitObjectRevision(WORKINGTREE, "*", new Date(), " // " + VCS.WORKINGTREE); revisions.add(rev); } LogCommand logCommand = git.log(); if (path != null && !".".equals(path)) { logCommand = 
logCommand.addPath(path); } Iterable<RevCommit> iterable = logCommand.call(); for (RevCommit commit : iterable) { GitObjectRevision rev = new GitObjectRevision( commit.getName(), commit.getAuthorIdent().getName(), commit.getAuthorIdent().getWhen(), commit.getShortMessage()); revisions.add(rev); } } catch (Exception e) { // Do nothing } return revisions; } public List<UIFile> getUnstagedFiles() { return getUnstagedFiles(null); } public List<UIFile> getUnstagedFiles(String path) { List<UIFile> files = new ArrayList<>(); Status status = null; try { StatusCommand statusCommand = git.status(); if (path != null && !".".equals(path)) { statusCommand = statusCommand.addPath(path); } status = statusCommand.call(); } catch (Exception e) { e.printStackTrace(); return files; } status.getUntracked().forEach(name -> files.add(new UIFile(name, ChangeType.ADD, false))); status.getModified().forEach(name -> files.add(new UIFile(name, ChangeType.MODIFY, false))); status.getConflicting().forEach(name -> files.add(new UIFile(name, ChangeType.MODIFY, false))); status.getMissing().forEach(name -> files.add(new UIFile(name, ChangeType.DELETE, false))); return files; } public List<UIFile> getStagedFiles() { List<UIFile> files = new ArrayList<>(); Status status = null; try { status = git.status().call(); } catch (Exception e) { e.printStackTrace(); return files; } status.getAdded().forEach(name -> files.add(new UIFile(name, ChangeType.ADD, true))); status.getChanged().forEach(name -> files.add(new UIFile(name, ChangeType.MODIFY, true))); status.getRemoved().forEach(name -> files.add(new UIFile(name, ChangeType.DELETE, true))); return files; } public List<UIFile> getStagedFiles(String oldCommitId, String newCommitId) { List<UIFile> files = new ArrayList<>(); try { List<DiffEntry> diffs = getDiffCommand(oldCommitId, newCommitId).setShowNameAndStatusOnly(true).call(); RenameDetector rd = new RenameDetector(git.getRepository()); rd.addAll(diffs); diffs = rd.compute(); diffs.forEach( diff -> 
files.add( new UIFile( diff.getChangeType() == ChangeType.DELETE ? diff.getOldPath() : diff.getNewPath(), diff.getChangeType(), false))); } catch (Exception e) { e.printStackTrace(); } return files; } public boolean hasStagedFiles() { if (git.getRepository().getRepositoryState() == RepositoryState.SAFE) { return !getStagedFiles().isEmpty(); } else { return git.getRepository().getRepositoryState().canCommit(); } } public void initRepo(String baseDirectory) throws Exception { git = Git.init().setDirectory(new File(baseDirectory)).call(); directory = baseDirectory; } public void openRepo(String baseDirectory) throws Exception { git = Git.open(new File(baseDirectory)); directory = baseDirectory; } public void closeRepo() { git.close(); git = null; } public void add(String filePattern) throws HopException { try { if (filePattern.endsWith(CONST_OURS) || filePattern.endsWith(CONST_THEIRS)) { FileUtils.rename( new File(directory, filePattern), new File(directory, FilenameUtils.removeExtension(filePattern)), StandardCopyOption.REPLACE_EXISTING); filePattern = FilenameUtils.removeExtension(filePattern); org.apache.commons.io.FileUtils.deleteQuietly( new File(directory, filePattern + CONST_OURS)); org.apache.commons.io.FileUtils.deleteQuietly( new File(directory, filePattern + CONST_THEIRS)); } git.add().addFilepattern(filePattern).call(); } catch (Exception e) { throw new HopException("Error adding '" + filePattern + "'to git", e); } } public void rm(String filepattern) { try { git.rm().addFilepattern(filepattern).call(); } catch (Exception e) { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } /** Reset to a commit (mixed) */ public void reset(String name) { try { git.reset().setRef(name).call(); } catch (Exception e) { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } /** Reset a file to HEAD (mixed) */ public void resetPath(String path) { try { git.reset().addPath(path).call(); } catch (Exception e) { 
showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } } @VisibleForTesting void resetHard() throws Exception { git.reset().setMode(ResetType.HARD).call(); } public boolean rollback(String name) { if (hasUncommittedChanges()) { showMessageBox( BaseMessages.getString(PKG, CONST_DIALOG_ERROR), BaseMessages.getString(PKG, "Git.Dialog.UncommittedChanges.Message")); return false; } String commit = resolve(Constants.HEAD).getName(); RevertCommand cmd = git.revert(); for (int i = 0; i < getRevisions().size(); i++) { String commitId = getRevisions().get(i).getRevisionId(); /* * Revert commits from HEAD to the specified commit in reverse order. */ cmd.include(resolve(commitId)); if (commitId.equals(name)) { break; } } try { cmd.call(); git.reset().setRef(commit).call(); return true; } catch (Exception e) { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage()); } return false; } public boolean pull() throws HopException { if (hasUncommittedChanges()) { throw new HopException( "You have uncommitted changes. Please commit work before pulling changes."); } if (!hasRemote()) { throw new HopException("There is no remote set up to pull from. 
Please set this up first."); } try { // Pull = Fetch + Merge git.fetch().setCredentialsProvider(credentialsProvider).call(); return mergeBranch( Constants.DEFAULT_REMOTE_NAME + "/" + getBranch(), MergeStrategy.RECURSIVE); } catch (TransportException e) { if (e.getMessage() .contains( CONST_AUTHENTICATION_IS_REQUIRED_BUT_NO_CREDENTIALS_PROVIDER_HAS_BEEN_REGISTERED) || e.getMessage() .contains(CONST_NOT_AUTHORIZED)) { // when the cached credential does not work if (e.getMessage().contains(CONST_NOT_AUTHORIZED)) { new ErrorDialog( HopGui.getInstance().getShell(), "Git Error", "Error Authenticating to Git service", e); } if (promptUsernamePassword()) { return pull(); } } else { throw new HopException("There was an error doing a git pull", e); } } catch (Exception e) { throw new HopException("There was an error doing a git pull", e); } return false; } public boolean push() throws HopException { return push("default"); } public boolean push(String type) throws HopException { if (!hasRemote()) { throw new HopException("There is no remote set up to push to. Please set this up."); } String name = null; List<String> names; EnterSelectionDialog esd; if (type.equals(VCS.TYPE_BRANCH)) { names = getLocalBranches(); esd = getEnterSelectionDialog( names.toArray(new String[names.size()]), "Select Branch", "Select the branch to push..."); name = esd.open(); if (name == null) { return false; } } else if (type.equals(VCS.TYPE_TAG)) { names = getTags(); esd = getEnterSelectionDialog( names.toArray(new String[names.size()]), "Select Tag", "Select the tag to push..."); name = esd.open(); if (name == null) { return false; } } try { name = name == null ? 
null : getExpandedName(name, type); PushCommand cmd; String url = git.getRepository().getConfig().getString("remote", "origin", "url"); if (!StringUtils.isEmpty(url) && (url.startsWith("https://") || url.startsWith("http://"))) { cmd = git.push(); cmd.setCredentialsProvider(credentialsProvider); } else { SshdSessionFactory customFactory = new SshdSessionFactoryBuilder() .setHomeDirectory(new File(System.getProperty("user.home"))) .setSshDirectory(new File(System.getProperty("user.home"), ".ssh")) .build(null); SshSessionFactory.setInstance(customFactory); cmd = git.push(); } if (name != null) { cmd.setRefSpecs(new RefSpec(name)); } Iterable<PushResult> resultIterable = cmd.call(); processPushResult(resultIterable); return true; } catch (TransportException e) { if (e.getMessage() .contains( CONST_AUTHENTICATION_IS_REQUIRED_BUT_NO_CREDENTIALS_PROVIDER_HAS_BEEN_REGISTERED) || e.getMessage() .contains(CONST_NOT_AUTHORIZED)) { // when the cached credential does not work if (promptUsernamePassword()) { return push(type); } } else { throw new HopException("There was an error doing a git push", e); } } catch (Exception e) { throw new HopException("There was an error doing a git push", e); } return false; } private void processPushResult(Iterable<PushResult> resultIterable) { resultIterable.forEach( result -> { // for each (push)url StringBuilder sb = new StringBuilder(); result.getRemoteUpdates().stream() .filter(update -> update.getStatus() != RemoteRefUpdate.Status.OK) .filter(update -> update.getStatus() != RemoteRefUpdate.Status.UP_TO_DATE) .forEach( update -> // for each failed refspec sb.append( result.getURI().toString() + "\n" + update.getSrcRef() + "\n" + update.getStatus().toString() + (update.getMessage() == null ? 
"" : "\n" + update.getMessage()) + "\n\n")); if (sb.isEmpty()) { showMessageBox( BaseMessages.getString(PKG, "Dialog.Success"), BaseMessages.getString(PKG, "Dialog.Success")); } else { showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), sb.toString()); } }); } public String diff(String oldCommitId, String newCommitId) { return diff(oldCommitId, newCommitId, null); } public String diff(String oldCommitId, String newCommitId, String file) { ByteArrayOutputStream out = new ByteArrayOutputStream(); try { getDiffCommand(oldCommitId, newCommitId) .setOutputStream(out) .setPathFilter(file == null ? TreeFilter.ALL : PathFilter.create(file)) .call(); return out.toString(StandardCharsets.UTF_8); } catch (Exception e) { return e.getMessage(); } } public InputStream open(String file, String commitId) throws HopException { if (commitId.equals(WORKINGTREE)) { String baseDirectory = getDirectory(); String filePath = baseDirectory + Const.FILE_SEPARATOR + file; try { return HopVfs.getInputStream(filePath); } catch (HopFileException e) { throw new HopException("Unable to find working tree file '" + filePath + "'", e); } } RevCommit commit = resolve(commitId); RevTree tree = commit.getTree(); try (TreeWalk tw = new TreeWalk(git.getRepository())) { tw.addTree(tree); tw.setFilter(PathFilter.create(file)); tw.setRecursive(true); tw.next(); ObjectLoader loader = git.getRepository().open(tw.getObjectId(0)); return loader.openStream(); } catch (MissingObjectException e) { throw new HopException( "Unable to find file '" + file + CONST_FOR_COMMIT_ID + commitId + "", e); } catch (IncorrectObjectTypeException e) { throw new HopException( "Incorrect object type error for file '" + file + CONST_FOR_COMMIT_ID + commitId + "", e); } catch (CorruptObjectException e) { throw new HopException( "Corrupt object error for file '" + file + CONST_FOR_COMMIT_ID + commitId + "", e); } catch (IOException e) { throw new HopException( "Error reading git file '" + file + CONST_FOR_COMMIT_ID + 
commitId + "", e); // fragment: tail of open()'s last catch clause — statement begins on a previous line
    }
  }

  /**
   * Clones a remote repository into the given local directory.
   *
   * <p>On an authentication failure the user is prompted for credentials and the clone is
   * retried; other errors are shown in an error dialog.
   *
   * @param directory local target directory
   * @param uri remote repository URI
   * @return true if the clone succeeded, false otherwise
   */
  public boolean cloneRepo(String directory, String uri) {
    CloneCommand cmd = Git.cloneRepository();
    cmd.setDirectory(new File(directory));
    cmd.setURI(uri);
    cmd.setCredentialsProvider(credentialsProvider);
    try {
      Git gitClone = cmd.call();
      // Close immediately: we only wanted the on-disk clone, not a live handle.
      gitClone.close();
      return true;
    } catch (Exception e) {
      // Auth failures are detected by message text, mirroring pull()/push().
      if ((e instanceof TransportException)
          && (e.getMessage()
                  .contains(
                      CONST_AUTHENTICATION_IS_REQUIRED_BUT_NO_CREDENTIALS_PROVIDER_HAS_BEEN_REGISTERED)
              || e.getMessage().contains(CONST_NOT_AUTHORIZED))) {
        if (promptUsernamePassword()) {
          // Retry with the freshly entered credentials.
          return cloneRepo(directory, uri);
        }
      } else {
        showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
      }
    }
    return false;
  }

  /**
   * Checks out the given ref/commit; errors are reported via an error dialog.
   *
   * @param name branch name, tag name, or commit id to check out
   */
  public void checkout(String name) {
    try {
      git.checkout().setName(name).call();
    } catch (Exception e) {
      showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
    }
  }

  /** Checks out a branch; alias for {@link #checkout(String)}. */
  public void checkoutBranch(String name) {
    checkout(name);
  }

  /** Checks out a tag; alias for {@link #checkout(String)}. */
  public void checkoutTag(String name) {
    checkout(name);
  }

  /**
   * Reverts a path to its HEAD state, discarding local changes.
   *
   * <p>Untracked/newly-added files are unstaged and deleted; conflict marker side files
   * (path + ours/theirs postfixes) are removed as well.
   *
   * @param path repository-relative path to revert
   * @throws HopException if any git operation fails
   */
  public void revertPath(String path) throws HopException {
    try {
      // Revert files to HEAD state
      Status status = git.status().addPath(path).call();
      if (!status.getUntracked().isEmpty() || !status.getAdded().isEmpty()) {
        // resetPath() is defined elsewhere in this class — presumably unstages the path;
        // TODO confirm. The file itself is then deleted since HEAD has no version of it.
        resetPath(path);
        org.apache.commons.io.FileUtils.deleteQuietly(new File(directory, path));
      }
      /*
       * This is a work-around to discard changes of conflicting files
       * Git CLI `git checkout -- conflicted.txt` discards the changes, but jgit does not
       */
      git.add().addFilepattern(path).call();
      git.checkout().setStartPoint(Constants.HEAD).addPath(path).call();
      org.apache.commons.io.FileUtils.deleteQuietly(new File(directory, path + CONST_OURS));
      org.apache.commons.io.FileUtils.deleteQuietly(new File(directory, path + CONST_THEIRS));
    } catch (Exception e) {
      throw new HopException("Git: error reverting path '" + path + "'", e);
    }
  }

  /**
   * Get the list of files which will be reverted.
   *
   * @param path The path to revert
   * @return The list of affected files
   */
  public List<String> getRevertPathFiles(String path) throws HopException {
    try {
      // Use a Set to de-duplicate: the same file can appear in several status categories.
      Set<String> files = new HashSet<>();
      StatusCommand statusCommand = git.status();
      if (path != null && !".".equals(path)) {
        statusCommand = statusCommand.addPath(path);
      }
      // Get files to be reverted to HEAD state
      //
      Status status = statusCommand.call();
      files.addAll(status.getUntracked());
      files.addAll(status.getAdded());
      files.addAll(status.getMissing());
      files.addAll(status.getChanged());
      files.addAll(status.getUncommittedChanges());
      return new ArrayList<>(files);
    } catch (Exception e) {
      throw new HopException("Git: error reverting path files for '" + path + "'", e);
    }
  }

  /**
   * Creates a local branch with the given name and checks it out.
   *
   * @param value the branch name
   * @return true on success; false (after an error dialog) on failure
   */
  public boolean createBranch(String value) {
    try {
      git.branchCreate().setName(value).call();
      checkoutBranch(getExpandedName(value, VCS.TYPE_BRANCH));
      return true;
    } catch (Exception e) {
      showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
      return false;
    }
  }

  /**
   * Deletes a local branch.
   *
   * @param name the branch name
   * @param force true to delete even if not merged
   * @return true on success; false (after an error dialog) on failure
   */
  public boolean deleteBranch(String name, boolean force) {
    try {
      git.branchDelete()
          .setBranchNames(getExpandedName(name, VCS.TYPE_BRANCH))
          .setForce(force)
          .call();
      return true;
    } catch (Exception e) {
      showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
      return false;
    }
  }

  /**
   * Merges the given branch into the current branch using the supplied strategy.
   *
   * <p>On a CONFLICTING result the "ours" and "theirs" versions of each conflicted file are
   * written next to the file (with postfixes) so the user can resolve them, and the merge is
   * still reported as true.
   *
   * @param value branch name to merge in
   * @param mergeStrategy the jgit merge strategy to apply
   * @return true on success or when conflicts were materialized for resolution; false for
   *     other non-successful merge states
   * @throws HopException on any error during the merge
   */
  public boolean mergeBranch(String value, MergeStrategy mergeStrategy) throws HopException {
    try {
      ObjectId obj = git.getRepository().resolve(value);
      MergeResult result = git.merge().include(obj).setStrategy(mergeStrategy).call();
      if (result.getMergeStatus().isSuccessful()) {
        return true;
      } else {
        // TODO: get rid of message box
        // showMessageBox( BaseMessages.getString(PKG, CONST_DIALOG_ERROR), result.getMergeStatus().toString());
        if (result.getMergeStatus() == MergeStatus.CONFLICTING) {
          Map<String, int[][]> conflicts = result.getConflicts();
          for (String path : conflicts.keySet()) {
            // Materialize both sides of the conflict for manual resolution.
            checkout(path, Constants.HEAD, CONST_OURS);
            checkout(path, getExpandedName(value, VCS.TYPE_BRANCH), CONST_THEIRS);
          }
          return true;
        }
      }
      return false;
    } catch (Exception e) {
      throw new HopException(
          "Error merging branch '" + value + "' with strategy '" + mergeStrategy + "'", e);
    }
  }

  /**
   * Reports whether the working tree has uncommitted changes; a failed status query is
   * treated as "no changes".
   */
  private boolean hasUncommittedChanges() {
    try {
      return git.status().call().hasUncommittedChanges();
    } catch (NoWorkTreeException | GitAPIException e) {
      e.printStackTrace();
      return false;
    }
  }

  /**
   * Writes the version of {@code path} at {@code commitId} to a side file named
   * {@code path + postfix} in the working directory (used to materialize ours/theirs
   * conflict versions).
   *
   * @param path repository-relative file path
   * @param commitId commit to read the file content from
   * @param postfix suffix appended to the on-disk file name
   * @throws HopException if the content cannot be read or written
   */
  private void checkout(String path, String commitId, String postfix) throws HopException {
    InputStream stream = open(path, commitId);
    File file = new File(directory + Const.FILE_SEPARATOR + path + postfix);
    try {
      org.apache.commons.io.FileUtils.copyInputStreamToFile(stream, file);
      stream.close();
    } catch (IOException e) {
      throw new HopException(
          "Error checking out file '"
              + path
              + CONST_FOR_COMMIT_ID
              + commitId
              + "' and postfix "
              + postfix,
          e);
    }
  }

  /** Builds a jgit diff command between the trees identified by the two commit ids. */
  private DiffCommand getDiffCommand(String oldCommitId, String newCommitId) throws Exception {
    return git.diff()
        .setOldTree(getTreeIterator(oldCommitId))
        .setNewTree(getTreeIterator(newCommitId));
  }

  /**
   * Maps a commit id (or the WORKINGTREE/INDEX sentinels, or null) to a tree iterator for
   * diffing. Null and unresolvable ids yield an empty tree.
   *
   * @param commitId commit id, WORKINGTREE, INDEX, or null
   * @return the corresponding tree iterator
   */
  private AbstractTreeIterator getTreeIterator(String commitId) throws Exception {
    if (commitId == null) {
      return new EmptyTreeIterator();
    }
    if (commitId.equals(WORKINGTREE)) {
      return new FileTreeIterator(git.getRepository());
    } else if (commitId.equals(INDEX)) {
      return new DirCacheIterator(git.getRepository().readDirCache());
    } else {
      ObjectId id = git.getRepository().resolve(commitId);
      if (id == null) { // commitId does not exist
        return new EmptyTreeIterator();
      } else {
        CanonicalTreeParser treeIterator = new CanonicalTreeParser();
        try (RevWalk rw = new RevWalk(git.getRepository())) {
          RevTree tree = rw.parseTree(id);
          try (ObjectReader reader = git.getRepository().newObjectReader()) {
            treeIterator.reset(reader, tree.getId());
          }
        }
        return treeIterator;
      }
    }
  }

  /**
   * Shortens a ref or commit name for display: full-length object ids are abbreviated to
   * 7 characters, anything else is shortened as a ref name.
   *
   * @param name a ref name or full commit id
   * @return the shortened display name
   */
  public String getShortenedName(String name) {
    if (name.length() == Constants.OBJECT_ID_STRING_LENGTH) {
      return name.substring(0, 7);
    } else {
      return Repository.shortenRefName(name);
    }
  }
public boolean isClean() {
  // A failed status query is treated as "not clean": the safer answer for callers
  // deciding whether it is OK to switch branches or overwrite files.
  try {
    return git.status().call().isClean();
  } catch (Exception e) {
    // Log through the UI channel (consistent with getIgnored()) instead of printStackTrace.
    LogChannel.UI.logError("Error checking git working tree status", e);
    return false;
  }
}

/**
 * Lists the short names of all tags in the repository.
 *
 * @return the tag names, or an empty list when the tag list cannot be read (never null, so
 *     callers such as push() can safely call names.toArray(...))
 */
public List<String> getTags() {
  try {
    return git.tagList().call().stream()
        .map(ref -> Repository.shortenRefName(ref.getName()))
        .toList();
  } catch (GitAPIException e) {
    // Fix: previously returned null on failure, which made callers NPE; return an empty
    // list and log, consistent with getIgnored() below.
    LogChannel.UI.logError("Error getting the list of git tags", e);
    return new ArrayList<>();
  }
}

/**
 * Creates a tag with the given name at HEAD.
 *
 * @param name the tag name
 * @return true on success; false (after an error dialog) on failure
 */
public boolean createTag(String name) {
  try {
    git.tag().setName(name).call();
    return true;
  } catch (Exception e) {
    showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
    return false;
  }
}

/**
 * Deletes the tag with the given name.
 *
 * @param name the (short) tag name
 * @return true on success; false (after an error dialog) on failure
 */
public boolean deleteTag(String name) {
  try {
    git.tagDelete().setTags(getExpandedName(name, VCS.TYPE_TAG)).call();
    return true;
  } catch (GitAPIException e) {
    showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
    return false;
  }
}

/**
 * Expands a short name into a full ref name or commit id.
 *
 * <p>Tags map to {@code refs/tags/<name>}. Branches are looked up first under
 * {@code refs/heads/}, then under {@code refs/remotes/}; when both lookups fail, control
 * deliberately falls through to the default case, which resolves the name as a commit id.
 *
 * @param name short branch/tag name or commit-ish
 * @param type one of the VCS.TYPE_* selectors
 * @return the expanded ref name, or the commit id resolution of {@code name}
 */
public String getExpandedName(String name, String type) {
  switch (type) {
    case TYPE_TAG:
      return Constants.R_TAGS + name;
    case TYPE_BRANCH:
      try {
        return git.getRepository().findRef(Constants.R_HEADS + name).getName();
      } catch (Exception e) {
        try {
          return git.getRepository().findRef(Constants.R_REMOTES + name).getName();
        } catch (Exception e1) {
          showMessageBox(BaseMessages.getString(PKG, CONST_DIALOG_ERROR), e.getMessage());
        }
      }
      // intentional fall-through: unresolved branch names are treated as commit-ish
    default:
      return getCommitId(name);
  }
}

/** Replaces the cached credentials used for HTTP(S) transport operations. */
@Override
public void setCredential(String username, String password) {
  credentialsProvider = new UsernamePasswordCredentialsProvider(username, password);
}

/**
 * Resolves a commit-ish string to a {@link RevCommit}.
 *
 * @param commitId a revision string understood by jgit's Repository.resolve
 * @return the resolved commit, or null when the revision is unknown or cannot be parsed
 */
public RevCommit resolve(String commitId) {
  ObjectId id = null;
  try {
    id = git.getRepository().resolve(commitId);
  } catch (RevisionSyntaxException | IOException e1) {
    LogChannel.UI.logError("Error resolving git revision '" + commitId + "'", e1);
  }
  // Fix: Repository.resolve() returns null for unknown revisions; previously the null id
  // was passed straight into RevWalk.parseAny(), crashing with an uncaught NPE.
  if (id == null) {
    return null;
  }
  try (RevWalk rw = new RevWalk(git.getRepository())) {
    RevObject obj = rw.parseAny(id);
    // NOTE(review): the cast assumes the id points at a commit; an annotated-tag or tree id
    // would throw ClassCastException here, as before — confirm callers only pass commits.
    return (RevCommit) obj;
  } catch (IOException e) {
    LogChannel.UI.logError("Error parsing git revision '" + commitId + "'", e);
  }
  return null;
}

/** Factory for the selection dialog; overridable in tests. */
@VisibleForTesting
EnterSelectionDialog getEnterSelectionDialog(String[] choices, String shellText, String message) {
  return new EnterSelectionDialog(HopGui.getInstance().getShell(), choices, shellText, message);
}

/**
 * Returns the set of files ignored by git (not in the index), optionally limited to a path.
 *
 * @param path path to limit the query to, or null/"." for the whole tree
 * @return the ignored file paths, or an empty set when the status query fails
 */
public Set<String> getIgnored(String path) {
  try {
    StatusCommand statusCommand = git.status();
    if (path != null && !".".equals(path)) {
      statusCommand = statusCommand.addPath(path);
    }
    Status status = statusCommand.call();
    return status.getIgnoredNotInIndex();
  } catch (GitAPIException e) {
    LogChannel.UI.logError("Error getting list of files ignored by git", e);
    return new HashSet<>();
  }
}

/** Exposes the underlying jgit handle. */
public Git getGit() {
  return git;
}
}
googleapis/sdk-platform-java
35,007
java-common-protos/proto-google-common-protos/src/main/java/com/google/api/FieldPolicy.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/policy.proto // Protobuf Java Version: 3.25.8 package com.google.api; /** * * * <pre> * Google API Policy Annotation * * This message defines a simple API policy annotation that can be used to * annotate API request and response message fields with applicable policies. * One field may have multiple applicable policies that must all be satisfied * before a request can be processed. This policy annotation is used to * generate the overall policy that will be used for automatic runtime * policy enforcement and documentation generation. * </pre> * * Protobuf type {@code google.api.FieldPolicy} */ public final class FieldPolicy extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.FieldPolicy) FieldPolicyOrBuilder { private static final long serialVersionUID = 0L; // Use FieldPolicy.newBuilder() to construct. 
private FieldPolicy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FieldPolicy() { selector_ = ""; resourcePermission_ = ""; resourceType_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FieldPolicy(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.PolicyProto.internal_static_google_api_FieldPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.PolicyProto.internal_static_google_api_FieldPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.FieldPolicy.class, com.google.api.FieldPolicy.Builder.class); } public static final int SELECTOR_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object selector_ = ""; /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @return The selector. */ @java.lang.Override public java.lang.String getSelector() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); selector_ = s; return s; } } /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. 
* * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @return The bytes for selector. */ @java.lang.Override public com.google.protobuf.ByteString getSelectorBytes() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); selector_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_PERMISSION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object resourcePermission_ = ""; /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". * </pre> * * <code>string resource_permission = 2;</code> * * @return The resourcePermission. */ @java.lang.Override public java.lang.String getResourcePermission() { java.lang.Object ref = resourcePermission_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourcePermission_ = s; return s; } } /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". 
* </pre> * * <code>string resource_permission = 2;</code> * * @return The bytes for resourcePermission. */ @java.lang.Override public com.google.protobuf.ByteString getResourcePermissionBytes() { java.lang.Object ref = resourcePermission_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourcePermission_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object resourceType_ = ""; /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @return The resourceType. */ @java.lang.Override public java.lang.String getResourceType() { java.lang.Object ref = resourceType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceType_ = s; return s; } } /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @return The bytes for resourceType. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceTypeBytes() { java.lang.Object ref = resourceType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, selector_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourcePermission_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourcePermission_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, resourceType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, selector_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourcePermission_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourcePermission_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, resourceType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object 
obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.FieldPolicy)) { return super.equals(obj); } com.google.api.FieldPolicy other = (com.google.api.FieldPolicy) obj; if (!getSelector().equals(other.getSelector())) return false; if (!getResourcePermission().equals(other.getResourcePermission())) return false; if (!getResourceType().equals(other.getResourceType())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SELECTOR_FIELD_NUMBER; hash = (53 * hash) + getSelector().hashCode(); hash = (37 * hash) + RESOURCE_PERMISSION_FIELD_NUMBER; hash = (53 * hash) + getResourcePermission().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getResourceType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.FieldPolicy parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.FieldPolicy parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.FieldPolicy parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.FieldPolicy parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.FieldPolicy parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.FieldPolicy parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.FieldPolicy parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.FieldPolicy parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.FieldPolicy parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.FieldPolicy parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.FieldPolicy parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.FieldPolicy parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(com.google.api.FieldPolicy prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Google API Policy Annotation * * This message defines a simple API policy annotation that can be used to * annotate API request and response message fields with applicable policies. * One field may have multiple applicable policies that must all be satisfied * before a request can be processed. This policy annotation is used to * generate the overall policy that will be used for automatic runtime * policy enforcement and documentation generation. * </pre> * * Protobuf type {@code google.api.FieldPolicy} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.FieldPolicy) com.google.api.FieldPolicyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.PolicyProto.internal_static_google_api_FieldPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.PolicyProto.internal_static_google_api_FieldPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.FieldPolicy.class, com.google.api.FieldPolicy.Builder.class); } // Construct using com.google.api.FieldPolicy.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; selector_ = ""; resourcePermission_ = ""; resourceType_ = ""; return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.PolicyProto.internal_static_google_api_FieldPolicy_descriptor; } @java.lang.Override public com.google.api.FieldPolicy getDefaultInstanceForType() { return com.google.api.FieldPolicy.getDefaultInstance(); } @java.lang.Override public com.google.api.FieldPolicy build() { com.google.api.FieldPolicy result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.FieldPolicy buildPartial() { com.google.api.FieldPolicy result = new com.google.api.FieldPolicy(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.api.FieldPolicy result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.selector_ = selector_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resourcePermission_ = resourcePermission_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.resourceType_ = resourceType_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, 
value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.FieldPolicy) { return mergeFrom((com.google.api.FieldPolicy) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.api.FieldPolicy other) { if (other == com.google.api.FieldPolicy.getDefaultInstance()) return this; if (!other.getSelector().isEmpty()) { selector_ = other.selector_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getResourcePermission().isEmpty()) { resourcePermission_ = other.resourcePermission_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getResourceType().isEmpty()) { resourceType_ = other.resourceType_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { selector_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { resourcePermission_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { resourceType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object selector_ = ""; /** * * * <pre> * Selects 
one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @return The selector. */ public java.lang.String getSelector() { java.lang.Object ref = selector_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); selector_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @return The bytes for selector. */ public com.google.protobuf.ByteString getSelectorBytes() { java.lang.Object ref = selector_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); selector_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. 
* * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @param value The selector to set. * @return This builder for chaining. */ public Builder setSelector(java.lang.String value) { if (value == null) { throw new NullPointerException(); } selector_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @return This builder for chaining. */ public Builder clearSelector() { selector_ = getDefaultInstance().getSelector(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Selects one or more request or response message fields to apply this * `FieldPolicy`. * * When a `FieldPolicy` is used in proto annotation, the selector must * be left as empty. The service config generator will automatically fill * the correct value. * * When a `FieldPolicy` is used in service config, the selector must be a * comma-separated string with valid request or response field paths, * such as "foo.bar" or "foo.bar,foo.baz". * </pre> * * <code>string selector = 1;</code> * * @param value The bytes for selector to set. * @return This builder for chaining. 
*/ public Builder setSelectorBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); selector_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object resourcePermission_ = ""; /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". * </pre> * * <code>string resource_permission = 2;</code> * * @return The resourcePermission. */ public java.lang.String getResourcePermission() { java.lang.Object ref = resourcePermission_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourcePermission_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". * </pre> * * <code>string resource_permission = 2;</code> * * @return The bytes for resourcePermission. */ public com.google.protobuf.ByteString getResourcePermissionBytes() { java.lang.Object ref = resourcePermission_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourcePermission_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". 
* </pre> * * <code>string resource_permission = 2;</code> * * @param value The resourcePermission to set. * @return This builder for chaining. */ public Builder setResourcePermission(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourcePermission_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". * </pre> * * <code>string resource_permission = 2;</code> * * @return This builder for chaining. */ public Builder clearResourcePermission() { resourcePermission_ = getDefaultInstance().getResourcePermission(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Specifies the required permission(s) for the resource referred to by the * field. It requires the field contains a valid resource reference, and * the request must pass the permission checks to proceed. For example, * "resourcemanager.projects.get". * </pre> * * <code>string resource_permission = 2;</code> * * @param value The bytes for resourcePermission to set. * @return This builder for chaining. */ public Builder setResourcePermissionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourcePermission_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object resourceType_ = ""; /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @return The resourceType. 
*/ public java.lang.String getResourceType() { java.lang.Object ref = resourceType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @return The bytes for resourceType. */ public com.google.protobuf.ByteString getResourceTypeBytes() { java.lang.Object ref = resourceType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @param value The resourceType to set. * @return This builder for chaining. */ public Builder setResourceType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @return This builder for chaining. */ public Builder clearResourceType() { resourceType_ = getDefaultInstance().getResourceType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Specifies the resource type for the resource referred to by the field. * </pre> * * <code>string resource_type = 3;</code> * * @param value The bytes for resourceType to set. * @return This builder for chaining. 
*/ public Builder setResourceTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.FieldPolicy) } // @@protoc_insertion_point(class_scope:google.api.FieldPolicy) private static final com.google.api.FieldPolicy DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.FieldPolicy(); } public static com.google.api.FieldPolicy getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FieldPolicy> PARSER = new com.google.protobuf.AbstractParser<FieldPolicy>() { @java.lang.Override public FieldPolicy parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FieldPolicy> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FieldPolicy> getParserForType() { return 
PARSER; } @java.lang.Override public com.google.api.FieldPolicy getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/commons-lang
35,277
src/test/java/org/apache/commons/lang3/ArrayUtilsRemoveTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3; import static org.apache.commons.lang3.LangAssertions.assertIndexOutOfBoundsException; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import org.junit.jupiter.api.Test; /** * Tests ArrayUtils remove and removeElement methods. 
*/ class ArrayUtilsRemoveTest extends AbstractLangTest { @Test void testRemoveAllBooleanOccurences() { boolean[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, true)); a = new boolean[0]; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurences(a, true)); a = new boolean[] { true }; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurences(a, true)); a = new boolean[] { true, true }; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurences(a, true)); a = new boolean[] { false, true, true, false, true }; assertArrayEquals(new boolean[]{false, false}, ArrayUtils.removeAllOccurences(a, true)); a = new boolean[] { false, true, true, false, true }; assertArrayEquals(new boolean[]{true, true, true}, ArrayUtils.removeAllOccurences(a, false)); } @Test void testRemoveAllBooleanOccurrences() { boolean[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, true)); a = new boolean[0]; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurrences(a, true)); a = new boolean[] { true }; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurrences(a, true)); a = new boolean[] { true, true }; assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, ArrayUtils.removeAllOccurrences(a, true)); a = new boolean[] { false, true, true, false, true }; assertArrayEquals(new boolean[]{false, false}, ArrayUtils.removeAllOccurrences(a, true)); a = new boolean[] { false, true, true, false, true }; assertArrayEquals(new boolean[]{true, true, true}, ArrayUtils.removeAllOccurrences(a, false)); } @Test void testRemoveAllByteOccurences() { byte[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, (byte) 2)); a = new byte[0]; assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurences(a, (byte) 2)); a = new byte[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurences(a, (byte) 2)); a = new byte[] { 2, 2 }; 
assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurences(a, (byte) 2)); a = new byte[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new byte[]{1, 3}, ArrayUtils.removeAllOccurences(a, (byte) 2)); a = new byte[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new byte[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, (byte) 4)); } @Test void testRemoveAllByteOccurrences() { byte[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, (byte) 2)); a = new byte[0]; assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurrences(a, (byte) 2)); a = new byte[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurrences(a, (byte) 2)); a = new byte[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, ArrayUtils.removeAllOccurrences(a, (byte) 2)); a = new byte[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new byte[]{1, 3}, ArrayUtils.removeAllOccurrences(a, (byte) 2)); a = new byte[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new byte[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, (byte) 4)); } @Test void testRemoveAllCharOccurences() { char[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, '2')); a = new char[0]; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurences(a, '2')); a = new char[] { '2' }; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurences(a, '2')); a = new char[] { '2', '2' }; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurences(a, '2')); a = new char[] { '1', '2', '2', '3', '2' }; assertArrayEquals(new char[]{'1', '3'}, ArrayUtils.removeAllOccurences(a, '2')); a = new char[] { '1', '2', '2', '3', '2' }; assertArrayEquals(new char[]{'1', '2', '2', '3', '2'}, ArrayUtils.removeAllOccurences(a, '4')); } @Test void testRemoveAllCharOccurrences() { char[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, '2')); a = new char[0]; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurrences(a, '2')); a = new char[] { '2' 
}; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurrences(a, '2')); a = new char[] { '2', '2' }; assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, ArrayUtils.removeAllOccurrences(a, '2')); a = new char[] { '1', '2', '2', '3', '2' }; assertArrayEquals(new char[]{'1', '3'}, ArrayUtils.removeAllOccurrences(a, '2')); a = new char[] { '1', '2', '2', '3', '2' }; assertArrayEquals(new char[]{'1', '2', '2', '3', '2'}, ArrayUtils.removeAllOccurrences(a, '4')); } @Test void testRemoveAllDoubleOccurences() { double[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, 2)); a = new double[0]; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new double[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new double[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new double[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new double[]{1, 3}, ArrayUtils.removeAllOccurences(a, 2)); a = new double[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new double[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, 4)); } @Test void testRemoveAllDoubleOccurrences() { double[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, 2)); a = new double[0]; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new double[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new double[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new double[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new double[]{1, 3}, ArrayUtils.removeAllOccurrences(a, 2)); a = new double[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new double[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, 4)); } @Test void testRemoveAllFloatOccurences() { float[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, 2)); a = new float[0]; 
assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new float[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new float[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new float[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new float[]{1, 3}, ArrayUtils.removeAllOccurences(a, 2)); a = new float[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new float[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, 4)); } @Test void testRemoveAllFloatOccurrences() { float[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, 2)); a = new float[0]; assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new float[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new float[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new float[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new float[]{1, 3}, ArrayUtils.removeAllOccurrences(a, 2)); a = new float[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new float[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, 4)); } @Test void testRemoveAllIntOccurences() { int[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, 2)); a = new int[0]; assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new int[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new int[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new int[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new int[]{1, 3}, ArrayUtils.removeAllOccurences(a, 2)); a = new int[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new int[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, 4)); } @Test void testRemoveAllIntOccurrences() { int[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, 2)); a = new int[0]; 
assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new int[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new int[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new int[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new int[]{1, 3}, ArrayUtils.removeAllOccurrences(a, 2)); a = new int[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new int[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, 4)); } @Test void testRemoveAllLongOccurences() { long[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, 2)); a = new long[0]; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new long[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new long[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurences(a, 2)); a = new long[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new long[]{1, 3}, ArrayUtils.removeAllOccurences(a, 2)); a = new long[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new long[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, 4)); } @Test void testRemoveAllLongOccurrences() { long[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, 2)); a = new long[0]; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new long[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new long[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, ArrayUtils.removeAllOccurrences(a, 2)); a = new long[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new long[]{1, 3}, ArrayUtils.removeAllOccurrences(a, 2)); a = new long[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new long[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, 4)); } @Test void testRemoveAllObjectOccurences() { String[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, "2")); a = new String[0]; 
assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurences(a, "2")); a = new String[] { "2" }; assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurences(a, "2")); a = new String[] { "2", "2" }; assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurences(a, "2")); a = new String[] { "1", "2", "2", "3", "2" }; assertArrayEquals(new String[]{"1", "3"}, ArrayUtils.removeAllOccurences(a, "2")); a = new String[] { "1", "2", "2", "3", "2" }; assertArrayEquals(new String[]{"1", "2", "2", "3", "2"}, ArrayUtils.removeAllOccurences(a, "4")); } @Test void testRemoveAllObjectOccurrences() { String[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, "2")); a = new String[0]; assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurrences(a, "2")); a = new String[] { "2" }; assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurrences(a, "2")); a = new String[] { "2", "2" }; assertArrayEquals(ArrayUtils.EMPTY_STRING_ARRAY, ArrayUtils.removeAllOccurrences(a, "2")); a = new String[] { "1", "2", "2", "3", "2" }; assertArrayEquals(new String[]{"1", "3"}, ArrayUtils.removeAllOccurrences(a, "2")); a = new String[] { "1", "2", "2", "3", "2" }; assertArrayEquals(new String[]{"1", "2", "2", "3", "2"}, ArrayUtils.removeAllOccurrences(a, "4")); } @Test void testRemoveAllShortOccurences() { short[] a = null; assertNull(ArrayUtils.removeAllOccurences(a, (short) 2)); a = new short[0]; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurences(a, (short) 2)); a = new short[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurences(a, (short) 2)); a = new short[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurences(a, (short) 2)); a = new short[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new short[]{1, 3}, ArrayUtils.removeAllOccurences(a, (short) 2)); a = new short[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new 
short[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurences(a, (short) 4)); } @Test void testRemoveAllShortOccurrences() { short[] a = null; assertNull(ArrayUtils.removeAllOccurrences(a, (short) 2)); a = new short[0]; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurrences(a, (short) 2)); a = new short[] { 2 }; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurrences(a, (short) 2)); a = new short[] { 2, 2 }; assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, ArrayUtils.removeAllOccurrences(a, (short) 2)); a = new short[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new short[]{1, 3}, ArrayUtils.removeAllOccurrences(a, (short) 2)); a = new short[] { 1, 2, 2, 3, 2 }; assertArrayEquals(new short[]{1, 2, 2, 3, 2}, ArrayUtils.removeAllOccurrences(a, (short) 4)); } @Test void testRemoveBooleanArray() { boolean[] array; array = ArrayUtils.remove(new boolean[] {true}, 0); assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new boolean[] {true, false}, 0); assertArrayEquals(new boolean[]{false}, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new boolean[] {true, false}, 1); assertArrayEquals(new boolean[]{true}, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new boolean[] {true, false, true}, 1); assertArrayEquals(new boolean[]{true, true}, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new boolean[] {true, false}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new boolean[] {true, false}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((boolean[]) null, 0)); } @Test void testRemoveByteArray() { byte[] array; array = ArrayUtils.remove(new byte[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, array); assertEquals(Byte.TYPE, 
array.getClass().getComponentType()); array = ArrayUtils.remove(new byte[] {1, 2}, 0); assertArrayEquals(new byte[]{2}, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new byte[] {1, 2}, 1); assertArrayEquals(new byte[]{1}, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new byte[] {1, 2, 1}, 1); assertArrayEquals(new byte[]{1, 1}, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new byte[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new byte[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((byte[]) null, 0)); } @Test void testRemoveCharArray() { char[] array; array = ArrayUtils.remove(new char[] {'a'}, 0); assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new char[] {'a', 'b'}, 0); assertArrayEquals(new char[]{'b'}, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new char[] {'a', 'b'}, 1); assertArrayEquals(new char[]{'a'}, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new char[] {'a', 'b', 'c'}, 1); assertArrayEquals(new char[]{'a', 'c'}, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new char[] {'a', 'b'}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new char[] {'a', 'b'}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((char[]) null, 0)); } @Test void testRemoveDoubleArray() { double[] array; array = ArrayUtils.remove(new double[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new double[] {1, 2}, 0); assertArrayEquals(new 
double[]{2}, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new double[] {1, 2}, 1); assertArrayEquals(new double[]{1}, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new double[] {1, 2, 1}, 1); assertArrayEquals(new double[]{1, 1}, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new double[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new double[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((double[]) null, 0)); } @Test void testRemoveElementBooleanArray() { boolean[] array; array = ArrayUtils.removeElement(null, true); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_BOOLEAN_ARRAY, true); assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new boolean[] {true}, true); assertArrayEquals(ArrayUtils.EMPTY_BOOLEAN_ARRAY, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new boolean[] {true, false}, true); assertArrayEquals(new boolean[]{false}, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new boolean[] {true, false, true}, true); assertArrayEquals(new boolean[]{false, true}, array); assertEquals(Boolean.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementByteArray() { byte[] array; array = ArrayUtils.removeElement((byte[]) null, (byte) 1); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_BYTE_ARRAY, (byte) 1); assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new byte[] {1}, (byte) 1); assertArrayEquals(ArrayUtils.EMPTY_BYTE_ARRAY, array); assertEquals(Byte.TYPE, 
array.getClass().getComponentType()); array = ArrayUtils.removeElement(new byte[] {1, 2}, (byte) 1); assertArrayEquals(new byte[]{2}, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new byte[] {1, 2, 1}, (byte) 1); assertArrayEquals(new byte[]{2, 1}, array); assertEquals(Byte.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementCharArray() { char[] array; array = ArrayUtils.removeElement((char[]) null, 'a'); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_CHAR_ARRAY, 'a'); assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new char[] {'a'}, 'a'); assertArrayEquals(ArrayUtils.EMPTY_CHAR_ARRAY, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new char[] {'a', 'b'}, 'a'); assertArrayEquals(new char[]{'b'}, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new char[] {'a', 'b', 'a'}, 'a'); assertArrayEquals(new char[]{'b', 'a'}, array); assertEquals(Character.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementDoubleArray() { double[] array; array = ArrayUtils.removeElement(null, (double) 1); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_DOUBLE_ARRAY, 1); assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new double[] {1}, 1); assertArrayEquals(ArrayUtils.EMPTY_DOUBLE_ARRAY, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new double[] {1, 2}, 1); assertArrayEquals(new double[]{2}, array); assertEquals(Double.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new double[] {1, 2, 1}, 1); assertArrayEquals(new double[]{2, 1}, array); 
assertEquals(Double.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementFloatArray() { float[] array; array = ArrayUtils.removeElement((float[]) null, 1); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_FLOAT_ARRAY, 1); assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new float[] {1}, 1); assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new float[] {1, 2}, 1); assertArrayEquals(new float[]{2}, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new float[] {1, 2, 1}, 1); assertArrayEquals(new float[]{2, 1}, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementIntArray() { int[] array; array = ArrayUtils.removeElement((int[]) null, 1); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_INT_ARRAY, 1); assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new int[] {1}, 1); assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new int[] {1, 2}, 1); assertArrayEquals(new int[]{2}, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new int[] {1, 2, 1}, 1); assertArrayEquals(new int[]{2, 1}, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementLongArray() { long[] array; array = ArrayUtils.removeElement((long[]) null, 1L); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_LONG_ARRAY, 1L); assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, array); assertEquals(Long.TYPE, 
array.getClass().getComponentType()); array = ArrayUtils.removeElement(new long[] {1}, 1L); assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new long[] {1, 2}, 1L); assertArrayEquals(new long[]{2}, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new long[] {1, 2, 1}, 1L); assertArrayEquals(new long[]{2, 1}, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); } @Test void testRemoveElementObjectArray() { Object[] array; array = ArrayUtils.removeElement(null, "a"); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_OBJECT_ARRAY, "a"); assertArrayEquals(ArrayUtils.EMPTY_OBJECT_ARRAY, array); assertEquals(Object.class, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new Object[] {"a"}, "a"); assertArrayEquals(ArrayUtils.EMPTY_OBJECT_ARRAY, array); assertEquals(Object.class, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new Object[] {"a", "b"}, "a"); assertArrayEquals(new Object[]{"b"}, array); assertEquals(Object.class, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new Object[] {"a", "b", "a"}, "a"); assertArrayEquals(new Object[]{"b", "a"}, array); assertEquals(Object.class, array.getClass().getComponentType()); } @Test void testRemoveElementShortArray() { short[] array; array = ArrayUtils.removeElement((short[]) null, (short) 1); assertNull(array); array = ArrayUtils.removeElement(ArrayUtils.EMPTY_SHORT_ARRAY, (short) 1); assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new short[] {1}, (short) 1); assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new short[] {1, 2}, (short) 1); assertArrayEquals(new 
short[]{2}, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.removeElement(new short[] {1, 2, 1}, (short) 1); assertArrayEquals(new short[]{2, 1}, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); } @Test void testRemoveFloatArray() { float[] array; array = ArrayUtils.remove(new float[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_FLOAT_ARRAY, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new float[] {1, 2}, 0); assertArrayEquals(new float[]{2}, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new float[] {1, 2}, 1); assertArrayEquals(new float[]{1}, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new float[] {1, 2, 1}, 1); assertArrayEquals(new float[]{1, 1}, array); assertEquals(Float.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new float[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new float[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((float[]) null, 0)); } @Test void testRemoveIntArray() { int[] array; array = ArrayUtils.remove(new int[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_INT_ARRAY, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new int[] {1, 2}, 0); assertArrayEquals(new int[]{2}, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new int[] {1, 2}, 1); assertArrayEquals(new int[]{1}, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new int[] {1, 2, 1}, 1); assertArrayEquals(new int[]{1, 1}, array); assertEquals(Integer.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new int[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> 
ArrayUtils.remove(new int[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((int[]) null, 0)); } @Test void testRemoveLongArray() { long[] array; array = ArrayUtils.remove(new long[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_LONG_ARRAY, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new long[] {1, 2}, 0); assertArrayEquals(new long[]{2}, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new long[] {1, 2}, 1); assertArrayEquals(new long[]{1}, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new long[] {1, 2, 1}, 1); assertArrayEquals(new long[]{1, 1}, array); assertEquals(Long.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new long[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new long[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((long[]) null, 0)); } @Test void testRemoveNumberArray() { final Number[] inarray = {Integer.valueOf(1), Long.valueOf(2), Byte.valueOf((byte) 3)}; assertEquals(3, inarray.length); Number[] outarray; outarray = ArrayUtils.remove(inarray, 1); assertEquals(2, outarray.length); assertEquals(Number.class, outarray.getClass().getComponentType()); outarray = ArrayUtils.remove(outarray, 1); assertEquals(1, outarray.length); assertEquals(Number.class, outarray.getClass().getComponentType()); outarray = ArrayUtils.remove(outarray, 0); assertEquals(0, outarray.length); assertEquals(Number.class, outarray.getClass().getComponentType()); } @Test void testRemoveObjectArray() { Object[] array; array = ArrayUtils.remove(new Object[] {"a"}, 0); assertArrayEquals(ArrayUtils.EMPTY_OBJECT_ARRAY, array); assertEquals(Object.class, array.getClass().getComponentType()); array = ArrayUtils.remove(new Object[] {"a", "b"}, 0); assertArrayEquals(new Object[]{"b"}, array); assertEquals(Object.class, 
array.getClass().getComponentType()); array = ArrayUtils.remove(new Object[] {"a", "b"}, 1); assertArrayEquals(new Object[]{"a"}, array); assertEquals(Object.class, array.getClass().getComponentType()); array = ArrayUtils.remove(new Object[] {"a", "b", "c"}, 1); assertArrayEquals(new Object[]{"a", "c"}, array); assertEquals(Object.class, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new Object[] {"a", "b"}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new Object[] {"a", "b"}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((Object[]) null, 0)); } @Test void testRemoveShortArray() { short[] array; array = ArrayUtils.remove(new short[] {1}, 0); assertArrayEquals(ArrayUtils.EMPTY_SHORT_ARRAY, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new short[] {1, 2}, 0); assertArrayEquals(new short[]{2}, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new short[] {1, 2}, 1); assertArrayEquals(new short[]{1}, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); array = ArrayUtils.remove(new short[] {1, 2, 1}, 1); assertArrayEquals(new short[]{1, 1}, array); assertEquals(Short.TYPE, array.getClass().getComponentType()); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new short[] {1, 2}, -1)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove(new short[] {1, 2}, 2)); assertIndexOutOfBoundsException(() -> ArrayUtils.remove((short[]) null, 0)); } }
googleapis/google-cloud-java
35,268
java-bigquerydatapolicy/google-cloud-bigquerydatapolicy/src/main/java/com/google/cloud/bigquery/datapolicies/v1/stub/HttpJsonDataPolicyServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.bigquery.datapolicies.v1.stub; import static com.google.cloud.bigquery.datapolicies.v1.DataPolicyServiceClient.ListDataPoliciesPagedResponse; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.bigquery.datapolicies.v1.CreateDataPolicyRequest; import com.google.cloud.bigquery.datapolicies.v1.DataPolicy; import com.google.cloud.bigquery.datapolicies.v1.DeleteDataPolicyRequest; import com.google.cloud.bigquery.datapolicies.v1.GetDataPolicyRequest; import com.google.cloud.bigquery.datapolicies.v1.ListDataPoliciesRequest; import com.google.cloud.bigquery.datapolicies.v1.ListDataPoliciesResponse; import com.google.cloud.bigquery.datapolicies.v1.RenameDataPolicyRequest; import com.google.cloud.bigquery.datapolicies.v1.UpdateDataPolicyRequest; import 
com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.protobuf.Empty; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the DataPolicyService service API. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator-java") public class HttpJsonDataPolicyServiceStub extends DataPolicyServiceStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build(); private static final ApiMethodDescriptor<CreateDataPolicyRequest, DataPolicy> createDataPolicyMethodDescriptor = ApiMethodDescriptor.<CreateDataPolicyRequest, DataPolicy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/CreateDataPolicy") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<CreateDataPolicyRequest>newBuilder() .setPath( "/v1/{parent=projects/*/locations/*}/dataPolicies", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<CreateDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<CreateDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("dataPolicy", request.getDataPolicy(), 
true)) .build()) .setResponseParser( ProtoMessageResponseParser.<DataPolicy>newBuilder() .setDefaultInstance(DataPolicy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<UpdateDataPolicyRequest, DataPolicy> updateDataPolicyMethodDescriptor = ApiMethodDescriptor.<UpdateDataPolicyRequest, DataPolicy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/UpdateDataPolicy") .setHttpMethod("PATCH") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<UpdateDataPolicyRequest>newBuilder() .setPath( "/v1/{dataPolicy.name=projects/*/locations/*/dataPolicies/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<UpdateDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam( fields, "dataPolicy.name", request.getDataPolicy().getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<UpdateDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("dataPolicy", request.getDataPolicy(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<DataPolicy>newBuilder() .setDefaultInstance(DataPolicy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<RenameDataPolicyRequest, DataPolicy> renameDataPolicyMethodDescriptor = ApiMethodDescriptor.<RenameDataPolicyRequest, DataPolicy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/RenameDataPolicy") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( 
ProtoMessageRequestFormatter.<RenameDataPolicyRequest>newBuilder() .setPath( "/v1/{name=projects/*/locations/*/dataPolicies/*}:rename", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<RenameDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<RenameDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("*", request.toBuilder().clearName().build(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<DataPolicy>newBuilder() .setDefaultInstance(DataPolicy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<DeleteDataPolicyRequest, Empty> deleteDataPolicyMethodDescriptor = ApiMethodDescriptor.<DeleteDataPolicyRequest, Empty>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/DeleteDataPolicy") .setHttpMethod("DELETE") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteDataPolicyRequest>newBuilder() .setPath( "/v1/{name=projects/*/locations/*/dataPolicies/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<DeleteDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( 
ProtoMessageResponseParser.<Empty>newBuilder() .setDefaultInstance(Empty.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetDataPolicyRequest, DataPolicy> getDataPolicyMethodDescriptor = ApiMethodDescriptor.<GetDataPolicyRequest, DataPolicy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetDataPolicy") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetDataPolicyRequest>newBuilder() .setPath( "/v1/{name=projects/*/locations/*/dataPolicies/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetDataPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<DataPolicy>newBuilder() .setDefaultInstance(DataPolicy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<ListDataPoliciesRequest, ListDataPoliciesResponse> listDataPoliciesMethodDescriptor = ApiMethodDescriptor.<ListDataPoliciesRequest, ListDataPoliciesResponse>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/ListDataPolicies") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListDataPoliciesRequest>newBuilder() .setPath( "/v1/{parent=projects/*/locations/*}/dataPolicies", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListDataPoliciesRequest> serializer = 
ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListDataPoliciesRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "filter", request.getFilter()); serializer.putQueryParam(fields, "pageSize", request.getPageSize()); serializer.putQueryParam(fields, "pageToken", request.getPageToken()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<ListDataPoliciesResponse>newBuilder() .setDefaultInstance(ListDataPoliciesResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor = ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetIamPolicy") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder() .setPath( "/v1/{resource=projects/*/locations/*/dataPolicies/*}:getIamPolicy", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetIamPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetIamPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("*", request.toBuilder().clearResource().build(), true)) .build()) .setResponseParser( 
ProtoMessageResponseParser.<Policy>newBuilder() .setDefaultInstance(Policy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor = ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/SetIamPolicy") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder() .setPath( "/v1/{resource=projects/*/locations/*/dataPolicies/*}:setIamPolicy", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<SetIamPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<SetIamPolicyRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("*", request.toBuilder().clearResource().build(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<Policy>newBuilder() .setDefaultInstance(Policy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsMethodDescriptor = ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder() .setFullMethodName( "google.cloud.bigquery.datapolicies.v1.DataPolicyService/TestIamPermissions") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder() .setPath( 
"/v1/{resource=projects/*/locations/*/dataPolicies/*}:testIamPermissions", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<TestIamPermissionsRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<TestIamPermissionsRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("*", request.toBuilder().clearResource().build(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder() .setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private final UnaryCallable<CreateDataPolicyRequest, DataPolicy> createDataPolicyCallable; private final UnaryCallable<UpdateDataPolicyRequest, DataPolicy> updateDataPolicyCallable; private final UnaryCallable<RenameDataPolicyRequest, DataPolicy> renameDataPolicyCallable; private final UnaryCallable<DeleteDataPolicyRequest, Empty> deleteDataPolicyCallable; private final UnaryCallable<GetDataPolicyRequest, DataPolicy> getDataPolicyCallable; private final UnaryCallable<ListDataPoliciesRequest, ListDataPoliciesResponse> listDataPoliciesCallable; private final UnaryCallable<ListDataPoliciesRequest, ListDataPoliciesPagedResponse> listDataPoliciesPagedCallable; private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable; private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable; private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable; private final BackgroundResource backgroundResources; private final HttpJsonStubCallableFactory callableFactory; public static final 
// NOTE(review): this looks like machine-generated GAPIC stub code (Google Cloud
// BigQuery DataPolicies, HTTP/JSON transport). Reformatted for readability with
// comments added; all code tokens are unchanged. This span starts mid-declaration:
// the leading modifiers of the first factory method (presumably
// "public static final") lie before the visible region — confirm against the full file.
HttpJsonDataPolicyServiceStub create(DataPolicyServiceStubSettings settings) throws IOException {
  return new HttpJsonDataPolicyServiceStub(settings, ClientContext.create(settings));
}

// Factory: build a stub from default HTTP/JSON settings and the supplied client context.
public static final HttpJsonDataPolicyServiceStub create(ClientContext clientContext)
    throws IOException {
  return new HttpJsonDataPolicyServiceStub(
      DataPolicyServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
}

// Factory: like the above, but also injects a custom callable factory
// (e.g. for tests that intercept callable creation).
public static final HttpJsonDataPolicyServiceStub create(
    ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
  return new HttpJsonDataPolicyServiceStub(
      DataPolicyServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}

/**
 * Constructs an instance of HttpJsonDataPolicyServiceStub, using the given settings. This is
 * protected so that it is easy to make a subclass, but otherwise, the static factory methods
 * should be preferred.
 */
protected HttpJsonDataPolicyServiceStub(
    DataPolicyServiceStubSettings settings, ClientContext clientContext) throws IOException {
  // Delegates to the full constructor with the default callable factory.
  this(settings, clientContext, new HttpJsonDataPolicyServiceCallableFactory());
}

/**
 * Constructs an instance of HttpJsonDataPolicyServiceStub, using the given settings. This is
 * protected so that it is easy to make a subclass, but otherwise, the static factory methods
 * should be preferred.
 */
protected HttpJsonDataPolicyServiceStub(
    DataPolicyServiceStubSettings settings,
    ClientContext clientContext,
    HttpJsonStubCallableFactory callableFactory)
    throws IOException {
  this.callableFactory = callableFactory;

  // One HttpJsonCallSettings per RPC. Each binds the (static) method descriptor, the shared
  // proto type registry, and a params extractor that lifts the routing-header field
  // ("parent", "name", "resource", ...) out of the request.
  HttpJsonCallSettings<CreateDataPolicyRequest, DataPolicy> createDataPolicyTransportSettings =
      HttpJsonCallSettings.<CreateDataPolicyRequest, DataPolicy>newBuilder()
          .setMethodDescriptor(createDataPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("parent", String.valueOf(request.getParent()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<UpdateDataPolicyRequest, DataPolicy> updateDataPolicyTransportSettings =
      HttpJsonCallSettings.<UpdateDataPolicyRequest, DataPolicy>newBuilder()
          .setMethodDescriptor(updateDataPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                // Routing key is the nested resource name of the policy being updated.
                builder.add(
                    "data_policy.name", String.valueOf(request.getDataPolicy().getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<RenameDataPolicyRequest, DataPolicy> renameDataPolicyTransportSettings =
      HttpJsonCallSettings.<RenameDataPolicyRequest, DataPolicy>newBuilder()
          .setMethodDescriptor(renameDataPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<DeleteDataPolicyRequest, Empty> deleteDataPolicyTransportSettings =
      HttpJsonCallSettings.<DeleteDataPolicyRequest, Empty>newBuilder()
          .setMethodDescriptor(deleteDataPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<GetDataPolicyRequest, DataPolicy> getDataPolicyTransportSettings =
      HttpJsonCallSettings.<GetDataPolicyRequest, DataPolicy>newBuilder()
          .setMethodDescriptor(getDataPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("name", String.valueOf(request.getName()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<ListDataPoliciesRequest, ListDataPoliciesResponse>
      listDataPoliciesTransportSettings =
          HttpJsonCallSettings.<ListDataPoliciesRequest, ListDataPoliciesResponse>newBuilder()
              .setMethodDescriptor(listDataPoliciesMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("parent", String.valueOf(request.getParent()));
                    return builder.build();
                  })
              .build();
  HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
      HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(getIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
      HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
          .setMethodDescriptor(setIamPolicyMethodDescriptor)
          .setTypeRegistry(typeRegistry)
          .setParamsExtractor(
              request -> {
                RequestParamsBuilder builder = RequestParamsBuilder.create();
                builder.add("resource", String.valueOf(request.getResource()));
                return builder.build();
              })
          .build();
  HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsTransportSettings =
          HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setMethodDescriptor(testIamPermissionsMethodDescriptor)
              .setTypeRegistry(typeRegistry)
              .setParamsExtractor(
                  request -> {
                    RequestParamsBuilder builder = RequestParamsBuilder.create();
                    builder.add("resource", String.valueOf(request.getResource()));
                    return builder.build();
                  })
              .build();

  // Wire each transport-settings object together with the per-RPC retry/timeout settings
  // into the final callables. listDataPolicies additionally gets a paged variant.
  this.createDataPolicyCallable =
      callableFactory.createUnaryCallable(
          createDataPolicyTransportSettings, settings.createDataPolicySettings(), clientContext);
  this.updateDataPolicyCallable =
      callableFactory.createUnaryCallable(
          updateDataPolicyTransportSettings, settings.updateDataPolicySettings(), clientContext);
  this.renameDataPolicyCallable =
      callableFactory.createUnaryCallable(
          renameDataPolicyTransportSettings, settings.renameDataPolicySettings(), clientContext);
  this.deleteDataPolicyCallable =
      callableFactory.createUnaryCallable(
          deleteDataPolicyTransportSettings, settings.deleteDataPolicySettings(), clientContext);
  this.getDataPolicyCallable =
      callableFactory.createUnaryCallable(
          getDataPolicyTransportSettings, settings.getDataPolicySettings(), clientContext);
  this.listDataPoliciesCallable =
      callableFactory.createUnaryCallable(
          listDataPoliciesTransportSettings, settings.listDataPoliciesSettings(), clientContext);
  this.listDataPoliciesPagedCallable =
      callableFactory.createPagedCallable(
          listDataPoliciesTransportSettings, settings.listDataPoliciesSettings(), clientContext);
  this.getIamPolicyCallable =
      callableFactory.createUnaryCallable(
          getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
  this.setIamPolicyCallable =
      callableFactory.createUnaryCallable(
          setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
  this.testIamPermissionsCallable =
      callableFactory.createUnaryCallable(
          testIamPermissionsTransportSettings,
          settings.testIamPermissionsSettings(),
          clientContext);

  // Aggregate all background resources so lifecycle methods below can delegate to one object.
  this.backgroundResources =
      new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}

// Exposes the static method descriptors for all RPCs of this service.
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
  methodDescriptors.add(createDataPolicyMethodDescriptor);
  methodDescriptors.add(updateDataPolicyMethodDescriptor);
  methodDescriptors.add(renameDataPolicyMethodDescriptor);
  methodDescriptors.add(deleteDataPolicyMethodDescriptor);
  methodDescriptors.add(getDataPolicyMethodDescriptor);
  methodDescriptors.add(listDataPoliciesMethodDescriptor);
  methodDescriptors.add(getIamPolicyMethodDescriptor);
  methodDescriptors.add(setIamPolicyMethodDescriptor);
  methodDescriptors.add(testIamPermissionsMethodDescriptor);
  return methodDescriptors;
}

// Plain accessors returning the callables built in the constructor.
@Override
public UnaryCallable<CreateDataPolicyRequest, DataPolicy> createDataPolicyCallable() {
  return createDataPolicyCallable;
}

@Override
public UnaryCallable<UpdateDataPolicyRequest, DataPolicy> updateDataPolicyCallable() {
  return updateDataPolicyCallable;
}

@Override
public UnaryCallable<RenameDataPolicyRequest, DataPolicy> renameDataPolicyCallable() {
  return renameDataPolicyCallable;
}

@Override
public UnaryCallable<DeleteDataPolicyRequest, Empty> deleteDataPolicyCallable() {
  return deleteDataPolicyCallable;
}

@Override
public UnaryCallable<GetDataPolicyRequest, DataPolicy> getDataPolicyCallable() {
  return getDataPolicyCallable;
}

@Override
public UnaryCallable<ListDataPoliciesRequest, ListDataPoliciesResponse>
    listDataPoliciesCallable() {
  return listDataPoliciesCallable;
}

@Override
public UnaryCallable<ListDataPoliciesRequest, ListDataPoliciesPagedResponse>
    listDataPoliciesPagedCallable() {
  return listDataPoliciesPagedCallable;
}

@Override
public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
  return getIamPolicyCallable;
}

@Override
public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
  return setIamPolicyCallable;
}

@Override
public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsCallable() {
  return testIamPermissionsCallable;
}

// Lifecycle methods below all delegate to the aggregated background resources.
@Override
public final void close() {
  try {
    backgroundResources.close();
  } catch (RuntimeException e) {
    // Runtime exceptions propagate unchanged.
    throw e;
  } catch (Exception e) {
    // Checked exceptions are wrapped so close() needs no throws clause.
    throw new IllegalStateException("Failed to close resource", e);
  }
}

@Override
public void shutdown() {
  backgroundResources.shutdown();
}

@Override
public boolean isShutdown() {
  return backgroundResources.isShutdown();
}

@Override
public boolean isTerminated() {
  return backgroundResources.isTerminated();
}

@Override
public void shutdownNow() {
  backgroundResources.shutdownNow();
}

@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  return backgroundResources.awaitTermination(duration, unit);
}
}
apache/geaflow
35,499
geaflow/geaflow-dsl/geaflow-dsl-runtime/src/main/java/org/apache/geaflow/dsl/runtime/traversal/StepLogicalPlanTranslator.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.geaflow.dsl.runtime.traversal;

import static org.apache.calcite.sql.SqlKind.DESCENDING;
import static org.apache.geaflow.common.utils.ArrayUtil.toIntArray;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.JoinInfo;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.geaflow.common.binary.BinaryString;
import org.apache.geaflow.common.type.IType;
import org.apache.geaflow.dsl.calcite.EdgeRecordType;
import org.apache.geaflow.dsl.calcite.PathRecordType;
import org.apache.geaflow.dsl.calcite.VertexRecordType;
import org.apache.geaflow.dsl.common.exception.GeaFlowDSLException;
import org.apache.geaflow.dsl.common.function.UDAF;
import org.apache.geaflow.dsl.common.types.GraphSchema;
import org.apache.geaflow.dsl.common.types.PathType;
import org.apache.geaflow.dsl.common.types.TableField;
import org.apache.geaflow.dsl.common.util.BinaryUtil;
import org.apache.geaflow.dsl.common.util.TypeCastUtil;
import org.apache.geaflow.dsl.rel.AbstractMatchNodeVisitor;
import org.apache.geaflow.dsl.rel.GraphMatch;
import org.apache.geaflow.dsl.rel.PathModify.PathModifyExpression;
import org.apache.geaflow.dsl.rel.PathSort;
import org.apache.geaflow.dsl.rel.match.EdgeMatch;
import org.apache.geaflow.dsl.rel.match.IMatchNode;
import org.apache.geaflow.dsl.rel.match.LoopUntilMatch;
import org.apache.geaflow.dsl.rel.match.MatchAggregate;
import org.apache.geaflow.dsl.rel.match.MatchDistinct;
import org.apache.geaflow.dsl.rel.match.MatchExtend;
import org.apache.geaflow.dsl.rel.match.MatchFilter;
import org.apache.geaflow.dsl.rel.match.MatchJoin;
import org.apache.geaflow.dsl.rel.match.MatchPathModify;
import org.apache.geaflow.dsl.rel.match.MatchPathSort;
import org.apache.geaflow.dsl.rel.match.MatchUnion;
import org.apache.geaflow.dsl.rel.match.OptionalEdgeMatch;
import org.apache.geaflow.dsl.rel.match.OptionalVertexMatch;
import org.apache.geaflow.dsl.rel.match.SubQueryStart;
import org.apache.geaflow.dsl.rel.match.VertexMatch;
import org.apache.geaflow.dsl.rel.match.VirtualEdgeMatch;
import org.apache.geaflow.dsl.rex.MatchAggregateCall;
import org.apache.geaflow.dsl.rex.RexObjectConstruct.VariableInfo;
import org.apache.geaflow.dsl.runtime.expression.Expression;
import org.apache.geaflow.dsl.runtime.expression.ExpressionTranslator;
import org.apache.geaflow.dsl.runtime.expression.field.FieldExpression;
import org.apache.geaflow.dsl.runtime.expression.field.ParameterFieldExpression;
import org.apache.geaflow.dsl.runtime.function.graph.MatchEdgeFunction;
import org.apache.geaflow.dsl.runtime.function.graph.MatchEdgeFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.MatchVertexFunction;
import org.apache.geaflow.dsl.runtime.function.graph.MatchVertexFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.MatchVirtualEdgeFunction;
import org.apache.geaflow.dsl.runtime.function.graph.MatchVirtualEdgeFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepAggExpressionFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepAggExpressionFunctionImpl.StepAggCall;
import org.apache.geaflow.dsl.runtime.function.graph.StepAggregateFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepBoolFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepBoolFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepJoinFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepJoinFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepKeyExpressionFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepKeyFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepKeyFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.graph.StepNodeTypeFilterFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepPathModifyFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepSortFunction;
import org.apache.geaflow.dsl.runtime.function.graph.StepSortFunctionImpl;
import org.apache.geaflow.dsl.runtime.function.table.order.OrderByField;
import org.apache.geaflow.dsl.runtime.function.table.order.OrderByField.ORDER;
import org.apache.geaflow.dsl.runtime.function.table.order.SortInfo;
import org.apache.geaflow.dsl.runtime.plan.PhysicAggregateRelNode;
import org.apache.geaflow.dsl.runtime.traversal.operator.MatchEdgeOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.MatchVertexOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepLocalExchangeOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepLocalSingleValueAggregateOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepNodeFilterOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepSourceOperator;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepSourceOperator.ConstantStartId;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepSourceOperator.ParameterStartId;
import org.apache.geaflow.dsl.runtime.traversal.operator.StepSourceOperator.StartId;
import org.apache.geaflow.dsl.runtime.util.FilterPushDownUtil;
import org.apache.geaflow.dsl.util.GQLRexUtil;
import org.apache.geaflow.dsl.util.SqlTypeUtil;
import org.apache.geaflow.state.data.TimeRange;
import org.apache.geaflow.state.pushdown.filter.EdgeTsFilter;
import org.apache.geaflow.state.pushdown.filter.IFilter;

/**
 * Translates a relational {@link GraphMatch} path-pattern tree into a {@link StepLogicalPlan}
 * DAG of traversal operators, via a {@link AbstractMatchNodeVisitor} over the match nodes.
 */
public class StepLogicalPlanTranslator {

    /**
     * Translate path pattern to {@link StepLogicalPlan}.
     *
     * @param graphMatch The path pattern to translate.
     * @return The last node of the {@link StepLogicalPlan}.
     */
    public StepLogicalPlan translate(GraphMatch graphMatch, StepLogicalPlanSet logicalPlanSet) {
        // do the plan translate.
        LogicalPlanTranslatorVisitor translator = new LogicalPlanTranslatorVisitor(logicalPlanSet);
        return translator.translate(graphMatch.getPathPattern());
    }

    /**
     * Translate the {@link RelNode} in graph match to {@link StepLogicalPlan}.
     **/
    private static class LogicalPlanTranslatorVisitor extends AbstractMatchNodeVisitor<StepLogicalPlan> {

        // Schema of the graph being traversed.
        private final GraphSchema graphSchema;
        // Plan set providing the graph schema and sub-query context for expression translation.
        private final StepLogicalPlanSet logicalPlanSet;
        // Schema used for path-modify nodes; initialized to graphSchema here.
        private final GraphSchema modifyGraphSchema;
        // label -> plan
        private Map<String, StepLogicalPlan> planCache = new HashMap<>();
        // The single source (start) plan shared by all branches; created lazily.
        private StepLogicalPlan logicalPlanHead = null;
        // Filter conditions pushed down from a MatchFilter to its input node.
        private final Map<RelNode, RexNode> nodePushDownFilters;

        public LogicalPlanTranslatorVisitor(StepLogicalPlanSet logicalPlanSet) {
            this(logicalPlanSet, new HashMap<>());
        }

        private LogicalPlanTranslatorVisitor(StepLogicalPlanSet logicalPlanSet,
                                             Map<RelNode, RexNode> nodePushDownFilters) {
            this.graphSchema = logicalPlanSet.getGraphSchema();
            this.logicalPlanSet = Objects.requireNonNull(logicalPlanSet);
            this.modifyGraphSchema = graphSchema;
            this.nodePushDownFilters = Objects.requireNonNull(nodePushDownFilters);
        }

        // Entry point: dispatch on the match-node type via the visitor.
        public StepLogicalPlan translate(RelNode pathPattern) {
            return this.visit(pathPattern);
        }

        /**
         * Translates a vertex-match node. Start ids are derived from pushed-down filters
         * (for a head node) or from the node's literal id set; plans are cached by label so
         * branches sharing a label reuse the same plan.
         */
        @Override
        public StepLogicalPlan visitVertexMatch(VertexMatch vertexMatch) {
            String label = vertexMatch.getLabel();
            RexNode filter = nodePushDownFilters.get(vertexMatch);
            // TODO use optimizer rule to push the filter to the vertex-match.
            if (vertexMatch.getPushDownFilter() != null) {
                filter = vertexMatch.getPushDownFilter();
            }
            Set<StartId> startIds = new HashSet<>();
            if (vertexMatch.getInput() == null && filter != null) {
                // Head node with a filter: extract the vertex-id constraints as start ids.
                Set<RexNode> ids = GQLRexUtil.findVertexIds(filter,
                    (VertexRecordType) vertexMatch.getNodeType());
                startIds = toStartIds(ids);
            } else if (!vertexMatch.getIdSet().isEmpty()) {
                startIds = vertexMatch.getIdSet().stream().map(id -> new ConstantStartId(id)).collect(
                    Collectors.toSet());
            }
            Set<BinaryString> nodeTypes = vertexMatch.getTypes().stream()
                .map(s -> (BinaryString) BinaryUtil.toBinaryForString(s))
                .collect(Collectors.toSet());
            // If this head label node has generated in other branch, just reuse it and push down the startIds.
            if (vertexMatch.getInput() == null && planCache.containsKey(label)) {
                StepLogicalPlan plan = planCache.get(label);
                // push start ids to StepSourceOperator
                assert plan.getInputs().size() == 1;
                if (plan.getInputs().get(0).getOperator() instanceof StepSourceOperator) {
                    StepSourceOperator sourceOp =
                        (StepSourceOperator) plan.getInputs().get(0).getOperator();
                    sourceOp.joinStartId(startIds);
                }
                if (vertexMatch.getTypes().size() > 0) {
                    return plan.filterNode(new StepNodeTypeFilterFunction(nodeTypes));
                }
                return plan;
            }
            IType<?> nodeType = SqlTypeUtil.convertType(vertexMatch.getNodeType());
            // generate input plan.
            StepLogicalPlan input;
            if (vertexMatch.getInput() != null) {
                input = this.visit(vertexMatch.getInput());
            } else {
                if (logicalPlanHead == null) {
                    // create start plan for the first time
                    input = StepLogicalPlan.start(startIds)
                        .withGraphSchema(graphSchema)
                        .withModifyGraphSchema(modifyGraphSchema)
                        .withInputPathSchema(PathType.EMPTY)
                        .withOutputPathSchema(PathType.EMPTY)
                        .withOutputType(nodeType);
                    logicalPlanHead = input;
                } else {
                    // start from the exists start plan.
                    StepLogicalPlan startPlan = logicalPlanHead;
                    assert startPlan.getOperator() instanceof StepSourceOperator :
                        "Start plan should be StepSourceOperator";
                    // push startIds of this branch to the StepSourceOperator.
                    ((StepSourceOperator) startPlan.getOperator()).unionStartId(startIds);
                    input = startPlan;
                }
            }
            PathType outputPath = (PathType) SqlTypeUtil.convertType(vertexMatch.getPathSchema());
            boolean isOptionalMatch = vertexMatch instanceof OptionalVertexMatch
                && SqlTypeUtil.convertType(vertexMatch.getNodeType()) != null;
            MatchVertexFunction mvf = new MatchVertexFunctionImpl(nodeTypes, isOptionalMatch,
                label, vertexMatch.getIdSet());
            StepLogicalPlan plan = input.vertexMatch(mvf)
                .withModifyGraphSchema(input.getModifyGraphSchema())
                .withOutputPathSchema(outputPath)
                .withOutputType(nodeType);
            planCache.put(label, plan);
            return plan;
        }

        /**
         * Translates an edge-match node. Pushed-down timestamp-range filters are turned into
         * {@link EdgeTsFilter}s (OR-combined) that are pushed to state.
         */
        @Override
        public StepLogicalPlan visitEdgeMatch(EdgeMatch edgeMatch) {
            String label = edgeMatch.getLabel();
            if (planCache.containsKey(label)) {
                return planCache.get(label);
            }
            if (edgeMatch.getInput() == null) {
                throw new GeaFlowDSLException("Graph match should start from a vertex");
            }
            StepLogicalPlan input = this.visit(edgeMatch.getInput());
            IType<?> nodeType = SqlTypeUtil.convertType(edgeMatch.getNodeType());
            PathType outputPath = (PathType) SqlTypeUtil.convertType(edgeMatch.getPathSchema());
            IFilter<?>[] pushDownFilter = null;
            RexNode filter = nodePushDownFilters.get(edgeMatch);
            if (filter != null) {
                // push down edge timestamp condition
                IFilter<?> tsRangeFilter = null;
                List<TimeRange> tsRanges = FilterPushDownUtil.findTsRange(filter,
                    (EdgeRecordType) edgeMatch.getNodeType()).stream().collect(Collectors.toList());
                if (!tsRanges.isEmpty()) {
                    for (TimeRange timeRange : tsRanges) {
                        if (tsRangeFilter != null) {
                            // OR-combine multiple time ranges into one filter.
                            tsRangeFilter = tsRangeFilter.or(new EdgeTsFilter(timeRange));
                        } else {
                            tsRangeFilter = new EdgeTsFilter(timeRange);
                        }
                    }
                }
                if (tsRangeFilter != null) {
                    pushDownFilter = new IFilter[]{tsRangeFilter};
                }
            }
            Set<BinaryString> edgeTypes = edgeMatch.getTypes().stream()
                .map(s -> (BinaryString) BinaryUtil.toBinaryForString(s))
                .collect(Collectors.toSet());
            boolean isOptionalMatch = edgeMatch instanceof OptionalEdgeMatch
                && SqlTypeUtil.convertType(edgeMatch.getNodeType()) != null;
            MatchEdgeFunction mef = pushDownFilter == null
                ? new MatchEdgeFunctionImpl(edgeMatch.getDirection(), edgeTypes, isOptionalMatch, label)
                : new MatchEdgeFunctionImpl(edgeMatch.getDirection(), edgeTypes, isOptionalMatch,
                label, pushDownFilter);
            StepLogicalPlan plan = input.edgeMatch(mef)
                .withModifyGraphSchema(input.getModifyGraphSchema())
                .withOutputPathSchema(outputPath)
                .withOutputType(nodeType);
            planCache.put(label, plan);
            return plan;
        }

        /** Translates a virtual-edge match whose target id is a computed expression. */
        @Override
        public StepLogicalPlan visitVirtualEdgeMatch(VirtualEdgeMatch virtualEdgeMatch) {
            StepLogicalPlan input = this.visit(virtualEdgeMatch.getInput());
            PathRecordType inputPath = ((IMatchNode) virtualEdgeMatch.getInput()).getPathSchema();
            Expression targetId = ExpressionTranslator.of(inputPath, logicalPlanSet)
                .translate(virtualEdgeMatch.getTargetId());
            PathType outputPath = (PathType) SqlTypeUtil.convertType(virtualEdgeMatch.getPathSchema());
            MatchVirtualEdgeFunction virtualEdgeFunction = new MatchVirtualEdgeFunctionImpl(targetId);
            return input.virtualEdgeMatch(virtualEdgeFunction)
                .withModifyGraphSchema(input.getModifyGraphSchema())
                .withOutputPathSchema(outputPath)
                .withOutputType(SqlTypeUtil.convertType(virtualEdgeMatch.getNodeType()));
        }

        /**
         * Translates a filter. The condition is also recorded in {@code nodePushDownFilters}
         * BEFORE visiting the input, so the input node can exploit it (start ids, ts ranges).
         */
        @Override
        public StepLogicalPlan visitFilter(MatchFilter filter) {
            // push down filter condition
            nodePushDownFilters.put(filter.getInput(), filter.getCondition());
            StepLogicalPlan input = this.visit(filter.getInput());
            PathType outputPath = (PathType) SqlTypeUtil.convertType(filter.getPathSchema());
            PathRecordType inputPath = ((IMatchNode) filter.getInput()).getPathSchema();
            Expression condition =
                ExpressionTranslator.of(inputPath, logicalPlanSet).translate(filter.getCondition());
            StepBoolFunction fn = new StepBoolFunctionImpl(condition);
            return input.filter(fn).withModifyGraphSchema(input.getModifyGraphSchema())
                .withOutputPathSchema(outputPath);
        }

        /**
         * Translates a join of two path branches. Detects the single-vertex-key case where both
         * join keys are the branch vertices themselves, enabling a local (co-located) join.
         */
        @Override
        public StepLogicalPlan visitJoin(MatchJoin join) {
            JoinInfo joinInfo = join.analyzeCondition();
            PathRecordType leftPathType = ((IMatchNode) join.getLeft()).getPathSchema();
            PathRecordType rightPathType = ((IMatchNode) join.getRight()).getPathSchema();
            IType<?>[] leftKeyTypes = joinInfo.leftKeys.stream()
                .map(index -> SqlTypeUtil.convertType(leftPathType
                    .getFieldList().get(index).getType()))
                .collect(Collectors.toList())
                .toArray(new IType[]{});
            IType<?>[] rightKeyTypes = joinInfo.rightKeys.stream()
                .map(index -> SqlTypeUtil.convertType(rightPathType
                    .getFieldList().get(index).getType()))
                .collect(Collectors.toList())
                .toArray(new IType[]{});
            StepKeyFunction leftKeyFn = new StepKeyFunctionImpl(toIntArray(joinInfo.leftKeys), leftKeyTypes);
            StepKeyFunction rightKeyFn = new StepKeyFunctionImpl(toIntArray(joinInfo.rightKeys), rightKeyTypes);
            StepLogicalPlan leftPlan = visit(join.getLeft());
            StepLogicalPlan rightPlan = visit(join.getRight());
            IType<?>[] leftPathTypes = leftPlan.getOutputPathSchema().getTypes();
            IType<?>[] rightPathTypes = rightPlan.getOutputPathSchema().getTypes();
            Expression joinConditionExp =
                ExpressionTranslator.of(join.getPathSchema()).translate(join.getCondition());
            StepJoinFunction joinFunction = new StepJoinFunctionImpl(join.getJoinType(),
                leftPathTypes, rightPathTypes, joinConditionExp);
            PathType inputJoinPath = (PathType) SqlTypeUtil.convertType(leftPathType.join(rightPathType,
                join.getCluster().getTypeFactory()));
            PathType joinOutputPath = (PathType) SqlTypeUtil.convertType(join.getPathSchema());
            List<StepLogicalPlan> leftChainableVertex =
                StepLogicalPlanTranslator.getChainableVertexMatch(leftPlan);
            List<StepLogicalPlan> rightChainableVertex =
                StepLogicalPlanTranslator.getChainableVertexMatch(rightPlan);
            boolean isLocalJoin = false;
            if (leftChainableVertex.size() == 1 && rightChainableVertex.size() == 1
                && joinInfo.leftKeys.size() == 1 && joinInfo.rightKeys.size() == 1) {
                String leftVertexLabel =
                    ((MatchVertexOperator) leftChainableVertex.get(0).getOperator()).getLabel();
                String rightVertexLabel =
                    ((MatchVertexOperator) rightChainableVertex.get(0).getOperator()).getLabel();
                // Local join only if each side's join key field IS that side's match vertex.
                if (leftPathType.getFieldList().get(joinInfo.leftKeys.get(0)).getName().equals(leftVertexLabel)
                    && rightPathType.getFieldList().get(joinInfo.rightKeys.get(0)).getName().equals(rightVertexLabel)) {
                    isLocalJoin = true;
                }
            }
            return leftPlan
                .join(rightPlan, leftKeyFn, rightKeyFn, joinFunction, inputJoinPath, isLocalJoin)
                .withOutputPathSchema(joinOutputPath);
        }

        /** Translates a distinct node: deduplicates on every field of the input path. */
        @Override
        public StepLogicalPlan visitDistinct(MatchDistinct distinct) {
            RelNode input = distinct.getInput(0);
            IType<?>[] types = ((IMatchNode) input).getPathSchema().getFieldList().stream()
                .map(field -> SqlTypeUtil.convertType(field.getType()))
                .collect(Collectors.toList()).toArray(new IType[]{});
            // Key on all path fields.
            int[] keyIndices = new int[types.length];
            for (int i = 0, size = types.length; i < size; i++) {
                keyIndices[i] = i;
            }
            StepKeyFunction keyFunction = new StepKeyFunctionImpl(keyIndices, types);
            PathType distinctPathType = (PathType) SqlTypeUtil.convertType(distinct.getPathSchema());
            IType<?> nodeType = SqlTypeUtil.convertType(distinct.getNodeType());
            return visit(input).distinct(keyFunction)
                .withOutputPathSchema(distinctPathType)
                .withOutputType(nodeType);
        }

        /**
         * Translates a union. Each input is visited with an isolated copy of the plan cache;
         * UNION (non-all) additionally appends a distinct on the full path.
         */
        @Override
        public StepLogicalPlan visitUnion(MatchUnion union) {
            List<StepLogicalPlan> inputPlans = new ArrayList<>();
            for (int i = 0, size = union.getInputs().size(); i < size; i++) {
                // The input of union should not referer the plan cache generated by each other.
                // So we create a new plan cache for each input.
                Map<String, StepLogicalPlan> prePlanCache = planCache;
                planCache = new HashMap<>(planCache);
                inputPlans.add(visit(union.getInput(i)));
                // recover pre-plan cache.
                planCache = prePlanCache;
            }
            StepLogicalPlan firstPlan = inputPlans.get(0);
            PathType unionPathType = (PathType) SqlTypeUtil.convertType(union.getPathSchema());
            IType<?> nodeType = SqlTypeUtil.convertType(union.getNodeType());
            StepLogicalPlan unionPlan = firstPlan.union(inputPlans.subList(1, inputPlans.size()))
                .withModifyGraphSchema(firstPlan.getModifyGraphSchema())
                .withOutputPathSchema(unionPathType)
                .withOutputType(nodeType);
            if (union.all) {
                return unionPlan;
            } else {
                // UNION (not UNION ALL): deduplicate on every output path field.
                IType<?>[] types = unionPlan.getOutputPathSchema().getFields().stream()
                    .map(TableField::getType)
                    .collect(Collectors.toList()).toArray(new IType[]{});
                int[] keyIndices = new int[types.length];
                for (int i = 0, size = types.length; i < size; i++) {
                    keyIndices[i] = i;
                }
                StepKeyFunction keyFunction = new StepKeyFunctionImpl(keyIndices, types);
                return unionPlan.distinct(keyFunction)
                    .withModifyGraphSchema(unionPlan.getModifyGraphSchema())
                    .withOutputPathSchema(unionPlan.getOutputPathSchema())
                    .withOutputType(unionPlan.getOutputType());
            }
        }

        /** Translates a loop-until: loop body plus an until-condition over the body's path. */
        @Override
        public StepLogicalPlan visitLoopMatch(LoopUntilMatch loopMatch) {
            StepLogicalPlan loopStart = visit(loopMatch.getInput());
            StepLogicalPlan loopBody = visit(loopMatch.getLoopBody());
            for (StepLogicalPlan plan : loopBody.getFinalPlans()) {
                plan.withModifyGraphSchema(loopStart.getModifyGraphSchema());
            }
            ExpressionTranslator translator =
                ExpressionTranslator.of(loopMatch.getLoopBody().getPathSchema());
            Expression utilCondition = translator.translate(loopMatch.getUtilCondition());
            PathType outputPath = (PathType) SqlTypeUtil.convertType(loopMatch.getPathSchema());
            IType<?> nodeType = SqlTypeUtil.convertType(loopMatch.getNodeType());
            // Path fields contributed by one loop iteration = body path size minus start path size.
            int loopStartPathFieldCount = loopStart.getOutputPathSchema().size();
            int loopBodyPathFieldCount = loopBody.getOutputPathSchema().size() - loopStartPathFieldCount;
            return loopStart.loopUtil(loopBody, new StepBoolFunctionImpl(utilCondition),
                    loopMatch.getMinLoopCount(), loopMatch.getMaxLoopCount(),
                    loopStartPathFieldCount, loopBodyPathFieldCount)
                .withModifyGraphSchema(loopStart.getModifyGraphSchema())
                .withOutputPathSchema(outputPath)
                .withOutputType(nodeType)
                ;
        }

        /** Translates the start node of a correlated sub-query. */
        @Override
        public StepLogicalPlan visitSubQueryStart(SubQueryStart subQueryStart) {
            PathType pathType = (PathType) SqlTypeUtil.convertType(subQueryStart.getPathSchema());
            return StepLogicalPlan.subQueryStart(subQueryStart.getQueryName())
                .withGraphSchema(graphSchema)
                .withInputPathSchema(pathType)
                .withOutputPathSchema(pathType)
                .withOutputType(SqlTypeUtil.convertType(subQueryStart.getNodeType()));
        }

        /**
         * Translates a path-modify: rewrites existing path fields in place via per-field
         * object-construct expressions. Global if any expression reads a global variable.
         */
        @Override
        public StepLogicalPlan visitPathModify(MatchPathModify pathModify) {
            StepLogicalPlan input = visit(pathModify.getInput());
            List<PathModifyExpression> modifyExpressions = pathModify.getExpressions();
            int[] updatePathIndices = new int[modifyExpressions.size()];
            Expression[] updateExpressions = new Expression[modifyExpressions.size()];
            ExpressionTranslator translator =
                ExpressionTranslator.of(pathModify.getInput().getRowType(), logicalPlanSet);
            for (int i = 0; i < modifyExpressions.size(); i++) {
                PathModifyExpression modifyExpression = modifyExpressions.get(i);
                updatePathIndices[i] = modifyExpression.getIndex();
                updateExpressions[i] = translator.translate(modifyExpression.getObjectConstruct());
            }
            IType<?>[] inputFieldTypes = input.getOutputPathSchema().getFields()
                .stream()
                .map(TableField::getType)
                .collect(Collectors.toList())
                .toArray(new IType[]{});
            GraphSchema modifyGraphSchema =
                (GraphSchema) SqlTypeUtil.convertType(pathModify.getModifyGraphType());
            StepPathModifyFunction modifyFunction =
                new StepPathModifyFunction(updatePathIndices, updateExpressions, inputFieldTypes);
            boolean isGlobal = pathModify.getExpressions().stream().anyMatch(exp -> {
                return exp.getObjectConstruct().getVariableInfo().stream().anyMatch(VariableInfo::isGlobal);
            });
            return input.map(modifyFunction, isGlobal)
                .withGraphSchema(graphSchema)
                .withModifyGraphSchema(modifyGraphSchema)
                .withInputPathSchema(input.getOutputPathSchema())
                .withOutputPathSchema((PathType) SqlTypeUtil.convertType(pathModify.getRowType()))
                .withOutputType(input.getOutputType());
        }

        /**
         * Translates a MATCH-extend: like path-modify, but fields not in the rewrite set are
         * APPENDED after the existing path fields (hence the running offset).
         */
        @Override
        public StepLogicalPlan visitExtend(MatchExtend matchExtend) {
            StepLogicalPlan input = visit(matchExtend.getInput());
            List<PathModifyExpression> modifyExpressions = matchExtend.getExpressions();
            int[] updatePathIndices = new int[modifyExpressions.size()];
            Expression[] updateExpressions = new Expression[modifyExpressions.size()];
            ExpressionTranslator translator = ExpressionTranslator.of(
                matchExtend.getInput().getRowType(), logicalPlanSet);
            int offset = 0;
            for (int i = 0; i < modifyExpressions.size(); i++) {
                PathModifyExpression modifyExpression = modifyExpressions.get(i);
                if (matchExtend.getRewriteFields().contains(modifyExpression.getLeftVar().getLabel())) {
                    // Rewrite of an existing field: keep its original path index.
                    updatePathIndices[i] = modifyExpression.getIndex();
                } else {
                    // New field: append after the current path.
                    updatePathIndices[i] = input.getOutputPathSchema().size() + offset;
                    offset++;
                }
                updateExpressions[i] = translator.translate(modifyExpression.getObjectConstruct());
            }
            IType<?>[] inputFieldTypes = input.getOutputPathSchema().getFields()
                .stream()
                .map(TableField::getType)
                .collect(Collectors.toList())
                .toArray(new IType[]{});
            GraphSchema modifyGraphSchema =
                (GraphSchema) SqlTypeUtil.convertType(matchExtend.getModifyGraphType());
            StepPathModifyFunction modifyFunction =
                new StepPathModifyFunction(updatePathIndices, updateExpressions, inputFieldTypes);
            return input.map(modifyFunction, false)
                .withGraphSchema(graphSchema)
                .withModifyGraphSchema(modifyGraphSchema)
                .withInputPathSchema(input.getOutputPathSchema())
                .withOutputPathSchema((PathType) SqlTypeUtil.convertType(matchExtend.getRowType()))
                .withOutputType(input.getOutputType());
        }

        /** Translates a path sort (ORDER BY / LIMIT over paths); path schema is unchanged. */
        @Override
        public StepLogicalPlan visitSort(MatchPathSort pathSort) {
            StepLogicalPlan input = visit(pathSort.getInput());
            SortInfo sortInfo = buildSortInfo(pathSort);
            StepSortFunction orderByFunction = new StepSortFunctionImpl(sortInfo);
            PathType inputPath = input.getOutputPathSchema();
            return input.sort(orderByFunction)
                .withModifyGraphSchema(input.getModifyGraphSchema())
                .withInputPathSchema(inputPath)
                .withOutputPathSchema(inputPath).withOutputType(inputPath);
        }

        /**
         * Translates a path aggregate: builds the group-key function, resolves each aggregate
         * call to its UDAF class, and prunes the path to the fields kept by the output schema.
         */
        @Override
        public StepLogicalPlan visitAggregate(MatchAggregate matchAggregate) {
            StepLogicalPlan input = visit(matchAggregate.getInput());
            List<RexNode> groupList = matchAggregate.getGroupSet();
            RelDataType inputRelDataType = matchAggregate.getInput().getRowType();
            List<Expression> groupListExpressions = groupList.stream().map(rex ->
                ExpressionTranslator.of(inputRelDataType, logicalPlanSet).translate(rex)).collect(
                Collectors.toList());
            StepKeyFunction keyFunction = new StepKeyExpressionFunctionImpl(
                groupListExpressions.toArray(new Expression[0]),
                groupListExpressions.stream().map(Expression::getOutputType).toArray(IType<?>[]::new));
            List<MatchAggregateCall> aggCalls = matchAggregate.getAggCalls();
            List<StepAggCall> aggFnCalls = new ArrayList<>();
            for (MatchAggregateCall aggCall : aggCalls) {
                String name = aggCall.getName();
                Expression[] argFields = aggCall.getArgList().stream().map(
                        rex -> ExpressionTranslator.of(inputRelDataType, logicalPlanSet).translate(rex))
                    .toArray(Expression[]::new);
                IType<?>[] argFieldTypes = Arrays.stream(argFields).map(Expression::getOutputType)
                    .toArray(IType<?>[]::new);
                // Resolve the SQL aggregation to its UDAF implementation by argument types.
                Class<? extends UDAF<?, ?, ?>> udafClass =
                    PhysicAggregateRelNode.findUDAF(aggCall.getAggregation(), argFieldTypes);
                StepAggCall functionCall = new StepAggCall(name, argFields, argFieldTypes,
                    udafClass, aggCall.isDistinct());
                aggFnCalls.add(functionCall);
            }
            List<IType<?>> aggOutputTypes = aggCalls.stream()
                .map(call -> SqlTypeUtil.convertType(call.getType()))
                .collect(Collectors.toList());
            // Indices of the input path fields that survive into the aggregate's path schema.
            int[] pathPruneIndices = inputRelDataType.getFieldList().stream().filter(
                f -> matchAggregate.getPathSchema().getFieldNames().contains(f.getName())
            ).map(RelDataTypeField::getIndex).mapToInt(Integer::intValue).toArray();
            IType<?>[] inputPathTypes = inputRelDataType.getFieldList().stream()
                .map(f -> SqlTypeUtil.convertType(f.getType())).toArray(IType<?>[]::new);
            IType<?>[] pathPruneTypes = matchAggregate.getPathSchema().getFieldList().stream()
                .map(f -> SqlTypeUtil.convertType(f.getType())).toArray(IType<?>[]::new);
            StepAggregateFunction aggFn = new StepAggExpressionFunctionImpl(pathPruneIndices,
                pathPruneTypes, inputPathTypes, aggFnCalls, aggOutputTypes);
            PathType inputPath = input.getOutputPathSchema();
            PathType outputPath = (PathType) SqlTypeUtil.convertType(matchAggregate.getRowType());
            return input.aggregate(inputPath, outputPath, keyFunction, aggFn);
        }

        // Builds the SortInfo (order-by fields + fetch limit) for a PathSort node.
        private SortInfo buildSortInfo(PathSort sort) {
            SortInfo sortInfo = new SortInfo();
            ExpressionTranslator translator = ExpressionTranslator.of(sort.getRowType());
            for (RexNode fd : sort.getOrderByExpressions()) {
                OrderByField orderByField = new OrderByField();
                if (fd.getKind() == DESCENDING) {
                    orderByField.order = ORDER.DESC;
                } else {
                    orderByField.order = ORDER.ASC;
                }
                orderByField.expression = translator.translate(fd);
                sortInfo.orderByFields.add(orderByField);
            }
            // -1 means "no limit"; otherwise the limit expression is evaluated as a constant.
            sortInfo.fetch = sort.getLimit() == null ? -1 : (int) TypeCastUtil.cast(
                translator.translate(sort.getLimit()).evaluate(null), Integer.class);
            return sortInfo;
        }
    }

    // Converts each vertex-id RexNode into a StartId.
    private static Set<StartId> toStartIds(Set<RexNode> ids) {
        return ids.stream()
            .map(StepLogicalPlanTranslator::toStartId)
            .collect(Collectors.toSet());
    }

    /**
     * Converts one id expression to a StartId: a constant id when the expression is fully
     * literal, otherwise a parameterized id whose parameter references become field reads.
     */
    private static StartId toStartId(RexNode id) {
        List<RexNode> nonLiteralLeafNodes = GQLRexUtil.collect(id,
            child -> !(child instanceof RexCall) && !(child instanceof RexLiteral));
        Expression expression = ExpressionTranslator.of(null).translate(id);
        if (nonLiteralLeafNodes.isEmpty()) {
            // all the leaf node is constant.
            Object constantValue = expression.evaluate(null);
            return new ConstantStartId(constantValue);
        } else {
            Expression idExpression = expression.replace(exp -> {
                if (exp instanceof ParameterFieldExpression) {
                    ParameterFieldExpression field = (ParameterFieldExpression) exp;
                    return new FieldExpression(field.getFieldIndex(), field.getOutputType());
                }
                return exp;
            });
            return new ParameterStartId(idExpression);
        }
    }

    /**
     * Collects the vertex-match plans reachable from {@code startPlan} through operators that
     * can be chained locally (edge match, node filter, local exchange, local single-value agg).
     * Returns empty if any other operator kind is encountered.
     */
    public static List<StepLogicalPlan> getChainableVertexMatch(StepLogicalPlan startPlan) {
        if (startPlan == null) {
            return Collections.emptyList();
        }
        if (startPlan.getOperator() instanceof MatchVertexOperator) {
            return Collections.singletonList(startPlan);
        } else if (startPlan.getOperator() instanceof MatchEdgeOperator
            || startPlan.getOperator() instanceof StepNodeFilterOperator
            || startPlan.getOperator() instanceof StepLocalExchangeOperator
            || startPlan.getOperator() instanceof StepLocalSingleValueAggregateOperator) {
            return startPlan.getInputs().stream().flatMap(
                input -> StepLogicalPlanTranslator.getChainableVertexMatch(input).stream()
            ).collect(Collectors.toList());
        }
        return Collections.emptyList();
    }
}
googleapis/google-cloud-java
34,993
java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/ListGroupsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/group_service.proto // Protobuf Java Version: 3.25.8 package com.google.monitoring.v3; /** * * * <pre> * The `ListGroups` response. * </pre> * * Protobuf type {@code google.monitoring.v3.ListGroupsResponse} */ public final class ListGroupsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.monitoring.v3.ListGroupsResponse) ListGroupsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListGroupsResponse.newBuilder() to construct. 
private ListGroupsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListGroupsResponse() { group_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListGroupsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.ListGroupsResponse.class, com.google.monitoring.v3.ListGroupsResponse.Builder.class); } public static final int GROUP_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.monitoring.v3.Group> group_; /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ @java.lang.Override public java.util.List<com.google.monitoring.v3.Group> getGroupList() { return group_; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.monitoring.v3.GroupOrBuilder> getGroupOrBuilderList() { return group_; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ @java.lang.Override public int getGroupCount() { return group_.size(); } /** * * * <pre> * The groups that match the specified filters. 
* </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ @java.lang.Override public com.google.monitoring.v3.Group getGroup(int index) { return group_.get(index); } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ @java.lang.Override public com.google.monitoring.v3.GroupOrBuilder getGroupOrBuilder(int index) { return group_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * If there are more results than have been returned, then this field is set * to a non-empty value. To see the additional results, * use that value as `page_token` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * If there are more results than have been returned, then this field is set * to a non-empty value. To see the additional results, * use that value as `page_token` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < group_.size(); i++) { output.writeMessage(1, group_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < group_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, group_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.monitoring.v3.ListGroupsResponse)) { return super.equals(obj); } com.google.monitoring.v3.ListGroupsResponse other = (com.google.monitoring.v3.ListGroupsResponse) obj; if (!getGroupList().equals(other.getGroupList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getGroupCount() > 0) { hash = (37 * hash) + GROUP_FIELD_NUMBER; hash = (53 * hash) + getGroupList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.monitoring.v3.ListGroupsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.ListGroupsResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.ListGroupsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.ListGroupsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.monitoring.v3.ListGroupsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The `ListGroups` response. * </pre> * * Protobuf type {@code google.monitoring.v3.ListGroupsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.monitoring.v3.ListGroupsResponse) com.google.monitoring.v3.ListGroupsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.ListGroupsResponse.class, com.google.monitoring.v3.ListGroupsResponse.Builder.class); } // Construct using com.google.monitoring.v3.ListGroupsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (groupBuilder_ == null) { group_ = java.util.Collections.emptyList(); } else { group_ = null; groupBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.monitoring.v3.GroupServiceProto .internal_static_google_monitoring_v3_ListGroupsResponse_descriptor; } @java.lang.Override public com.google.monitoring.v3.ListGroupsResponse getDefaultInstanceForType() { return 
com.google.monitoring.v3.ListGroupsResponse.getDefaultInstance(); } @java.lang.Override public com.google.monitoring.v3.ListGroupsResponse build() { com.google.monitoring.v3.ListGroupsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.monitoring.v3.ListGroupsResponse buildPartial() { com.google.monitoring.v3.ListGroupsResponse result = new com.google.monitoring.v3.ListGroupsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.monitoring.v3.ListGroupsResponse result) { if (groupBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { group_ = java.util.Collections.unmodifiableList(group_); bitField0_ = (bitField0_ & ~0x00000001); } result.group_ = group_; } else { result.group_ = groupBuilder_.build(); } } private void buildPartial0(com.google.monitoring.v3.ListGroupsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.monitoring.v3.ListGroupsResponse) { return mergeFrom((com.google.monitoring.v3.ListGroupsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.monitoring.v3.ListGroupsResponse other) { if (other == com.google.monitoring.v3.ListGroupsResponse.getDefaultInstance()) return this; if (groupBuilder_ == null) { if (!other.group_.isEmpty()) { if (group_.isEmpty()) { group_ = other.group_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureGroupIsMutable(); group_.addAll(other.group_); } onChanged(); } } else { if (!other.group_.isEmpty()) { if (groupBuilder_.isEmpty()) { groupBuilder_.dispose(); groupBuilder_ = null; group_ = other.group_; bitField0_ = (bitField0_ & ~0x00000001); groupBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getGroupFieldBuilder() : null; } else { groupBuilder_.addAllMessages(other.group_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.monitoring.v3.Group m = input.readMessage(com.google.monitoring.v3.Group.parser(), extensionRegistry); if (groupBuilder_ == null) { ensureGroupIsMutable(); group_.add(m); } else { groupBuilder_.addMessage(m); } break; } // case 10 case 18: { 
nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.monitoring.v3.Group> group_ = java.util.Collections.emptyList(); private void ensureGroupIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { group_ = new java.util.ArrayList<com.google.monitoring.v3.Group>(group_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.monitoring.v3.Group, com.google.monitoring.v3.Group.Builder, com.google.monitoring.v3.GroupOrBuilder> groupBuilder_; /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public java.util.List<com.google.monitoring.v3.Group> getGroupList() { if (groupBuilder_ == null) { return java.util.Collections.unmodifiableList(group_); } else { return groupBuilder_.getMessageList(); } } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public int getGroupCount() { if (groupBuilder_ == null) { return group_.size(); } else { return groupBuilder_.getCount(); } } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public com.google.monitoring.v3.Group getGroup(int index) { if (groupBuilder_ == null) { return group_.get(index); } else { return groupBuilder_.getMessage(index); } } /** * * * <pre> * The groups that match the specified filters. 
* </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder setGroup(int index, com.google.monitoring.v3.Group value) { if (groupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGroupIsMutable(); group_.set(index, value); onChanged(); } else { groupBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder setGroup(int index, com.google.monitoring.v3.Group.Builder builderForValue) { if (groupBuilder_ == null) { ensureGroupIsMutable(); group_.set(index, builderForValue.build()); onChanged(); } else { groupBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder addGroup(com.google.monitoring.v3.Group value) { if (groupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGroupIsMutable(); group_.add(value); onChanged(); } else { groupBuilder_.addMessage(value); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder addGroup(int index, com.google.monitoring.v3.Group value) { if (groupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureGroupIsMutable(); group_.add(index, value); onChanged(); } else { groupBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The groups that match the specified filters. 
* </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder addGroup(com.google.monitoring.v3.Group.Builder builderForValue) { if (groupBuilder_ == null) { ensureGroupIsMutable(); group_.add(builderForValue.build()); onChanged(); } else { groupBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder addGroup(int index, com.google.monitoring.v3.Group.Builder builderForValue) { if (groupBuilder_ == null) { ensureGroupIsMutable(); group_.add(index, builderForValue.build()); onChanged(); } else { groupBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder addAllGroup( java.lang.Iterable<? extends com.google.monitoring.v3.Group> values) { if (groupBuilder_ == null) { ensureGroupIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, group_); onChanged(); } else { groupBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder clearGroup() { if (groupBuilder_ == null) { group_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { groupBuilder_.clear(); } return this; } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public Builder removeGroup(int index) { if (groupBuilder_ == null) { ensureGroupIsMutable(); group_.remove(index); onChanged(); } else { groupBuilder_.remove(index); } return this; } /** * * * <pre> * The groups that match the specified filters. 
* </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public com.google.monitoring.v3.Group.Builder getGroupBuilder(int index) { return getGroupFieldBuilder().getBuilder(index); } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public com.google.monitoring.v3.GroupOrBuilder getGroupOrBuilder(int index) { if (groupBuilder_ == null) { return group_.get(index); } else { return groupBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public java.util.List<? extends com.google.monitoring.v3.GroupOrBuilder> getGroupOrBuilderList() { if (groupBuilder_ != null) { return groupBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(group_); } } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public com.google.monitoring.v3.Group.Builder addGroupBuilder() { return getGroupFieldBuilder().addBuilder(com.google.monitoring.v3.Group.getDefaultInstance()); } /** * * * <pre> * The groups that match the specified filters. * </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public com.google.monitoring.v3.Group.Builder addGroupBuilder(int index) { return getGroupFieldBuilder() .addBuilder(index, com.google.monitoring.v3.Group.getDefaultInstance()); } /** * * * <pre> * The groups that match the specified filters. 
* </pre> * * <code>repeated .google.monitoring.v3.Group group = 1;</code> */ public java.util.List<com.google.monitoring.v3.Group.Builder> getGroupBuilderList() { return getGroupFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.monitoring.v3.Group, com.google.monitoring.v3.Group.Builder, com.google.monitoring.v3.GroupOrBuilder> getGroupFieldBuilder() { if (groupBuilder_ == null) { groupBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.monitoring.v3.Group, com.google.monitoring.v3.Group.Builder, com.google.monitoring.v3.GroupOrBuilder>( group_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); group_ = null; } return groupBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * If there are more results than have been returned, then this field is set * to a non-empty value. To see the additional results, * use that value as `page_token` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * If there are more results than have been returned, then this field is set * to a non-empty value. To see the additional results, * use that value as `page_token` in the next call to this method. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof String)) {
    // Already cached as a ByteString; hand it back directly.
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  nextPageToken_ = b;
  return b;
}

/**
 * Sets the pagination token.
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Clears the pagination token back to the proto default (empty string).
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * Sets the pagination token from its UTF-8 byte representation.
 *
 * <pre>
 * If there are more results than have been returned, then this field is set
 * to a non-empty value. To see the additional results,
 * use that value as `page_token` in the next call to this method.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.monitoring.v3.ListGroupsResponse)
}

// @@protoc_insertion_point(class_scope:google.monitoring.v3.ListGroupsResponse)
private static final com.google.monitoring.v3.ListGroupsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.monitoring.v3.ListGroupsResponse();
}

public static com.google.monitoring.v3.ListGroupsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser singleton: delegates to the builder and attaches the
// partially-built message to any parse failure so callers can inspect it.
private static final com.google.protobuf.Parser<ListGroupsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListGroupsResponse>() {
      @java.lang.Override
      public ListGroupsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException()
              .setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListGroupsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListGroupsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.monitoring.v3.ListGroupsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
35,254
java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1/stub/HttpJsonSchemaServiceStub.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.discoveryengine.v1.stub;

import static com.google.cloud.discoveryengine.v1.SchemaServiceClient.ListSchemasPagedResponse;

import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.discoveryengine.v1.CreateSchemaMetadata;
import com.google.cloud.discoveryengine.v1.CreateSchemaRequest;
import com.google.cloud.discoveryengine.v1.DeleteSchemaMetadata;
import com.google.cloud.discoveryengine.v1.DeleteSchemaRequest;
import com.google.cloud.discoveryengine.v1.GetSchemaRequest;
import com.google.cloud.discoveryengine.v1.ListSchemasRequest;
import com.google.cloud.discoveryengine.v1.ListSchemasResponse;
import com.google.cloud.discoveryengine.v1.Schema;
import com.google.cloud.discoveryengine.v1.UpdateSchemaMetadata;
import com.google.cloud.discoveryengine.v1.UpdateSchemaRequest;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the SchemaService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class HttpJsonSchemaServiceStub extends SchemaServiceStub {
  // Proto message types that may appear inside long-running Operation
  // response/metadata payloads; registered so the parser can unpack them.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(Empty.getDescriptor())
          .add(Schema.getDescriptor())
          .add(UpdateSchemaMetadata.getDescriptor())
          .add(CreateSchemaMetadata.getDescriptor())
          .add(DeleteSchemaMetadata.getDescriptor())
          .build();

  // GetSchema: GET on the schema resource name; no request body.
  private static final ApiMethodDescriptor<GetSchemaRequest, Schema> getSchemaMethodDescriptor =
      ApiMethodDescriptor.<GetSchemaRequest, Schema>newBuilder()
          .setFullMethodName("google.cloud.discoveryengine.v1.SchemaService/GetSchema")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetSchemaRequest>newBuilder()
                  .setPath(
                      "/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetSchemaRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setAdditionalPaths(
                      "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetSchemaRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Schema>newBuilder()
                  .setDefaultInstance(Schema.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // ListSchemas: GET on the parent data store; pagination via query params.
  private static final ApiMethodDescriptor<ListSchemasRequest, ListSchemasResponse>
      listSchemasMethodDescriptor =
          ApiMethodDescriptor.<ListSchemasRequest, ListSchemasResponse>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1.SchemaService/ListSchemas")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListSchemasRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*/dataStores/*}/schemas",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListSchemasRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListSchemasRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListSchemasResponse>newBuilder()
                      .setDefaultInstance(ListSchemasResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // CreateSchema: POST with the Schema as the body; returns a long-running Operation.
  private static final ApiMethodDescriptor<CreateSchemaRequest, Operation>
      createSchemaMethodDescriptor =
          ApiMethodDescriptor.<CreateSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1.SchemaService/CreateSchema")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateSchemaRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*/dataStores/*}/schemas",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/schemas")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "schemaId", request.getSchemaId());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("schema", request.getSchema(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (CreateSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // UpdateSchema: PATCH keyed on the embedded schema's resource name.
  private static final ApiMethodDescriptor<UpdateSchemaRequest, Operation>
      updateSchemaMethodDescriptor =
          ApiMethodDescriptor.<UpdateSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1.SchemaService/UpdateSchema")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateSchemaRequest>newBuilder()
                      .setPath(
                          "/v1/{schema.name=projects/*/locations/*/dataStores/*/schemas/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "schema.name", request.getSchema().getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1/{schema.name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "allowMissing", request.getAllowMissing());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("schema", request.getSchema(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (UpdateSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // DeleteSchema: DELETE on the schema resource name; returns an Operation.
  private static final ApiMethodDescriptor<DeleteSchemaRequest, Operation>
      deleteSchemaMethodDescriptor =
          ApiMethodDescriptor.<DeleteSchemaRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.discoveryengine.v1.SchemaService/DeleteSchema")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteSchemaRequest>newBuilder()
                      .setPath(
                          "/v1/{name=projects/*/locations/*/dataStores/*/schemas/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSchemaRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteSchemaRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  private final UnaryCallable<GetSchemaRequest, Schema> getSchemaCallable;
  private final UnaryCallable<ListSchemasRequest, ListSchemasResponse> listSchemasCallable;
  private final UnaryCallable<ListSchemasRequest, ListSchemasPagedResponse>
      listSchemasPagedCallable;
  private final UnaryCallable<CreateSchemaRequest, Operation> createSchemaCallable;
  private final OperationCallable<CreateSchemaRequest, Schema, CreateSchemaMetadata>
      createSchemaOperationCallable;
  private final UnaryCallable<UpdateSchemaRequest, Operation> updateSchemaCallable;
  private final OperationCallable<UpdateSchemaRequest, Schema, UpdateSchemaMetadata>
      updateSchemaOperationCallable;
  private final UnaryCallable<DeleteSchemaRequest, Operation> deleteSchemaCallable;
  private final OperationCallable<DeleteSchemaRequest, Empty, DeleteSchemaMetadata>
      deleteSchemaOperationCallable;

  private final BackgroundResource backgroundResources;
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;

  public static final HttpJsonSchemaServiceStub create(SchemaServiceStubSettings settings)
      throws IOException {
    return new HttpJsonSchemaServiceStub(settings, ClientContext.create(settings));
  }

  public static final HttpJsonSchemaServiceStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonSchemaServiceStub(
        SchemaServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  public static final HttpJsonSchemaServiceStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    return new HttpJsonSchemaServiceStub(
        SchemaServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonSchemaServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSchemaServiceStub(
      SchemaServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new HttpJsonSchemaServiceCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonSchemaServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSchemaServiceStub(
      SchemaServiceStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // REST bindings for the google.longrunning.Operations mixin (cancel/get/list),
    // covering every resource path variant this service can emit operations under.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.CancelOperation",
                    HttpRule.newBuilder()
                        .setPost("/v1/{name=projects/*/operations/*}:cancel")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost(
                                    "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}:cancel")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet("/v1/{name=projects/*/operations/*}")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/identityMappingStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1/{name=projects/*/locations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1/{name=projects/*/operations/*}")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.ListOperations",
                    HttpRule.newBuilder()
                        .setGet("/v1/{name=projects/*}/operations")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/collections/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1/{name=projects/*/locations/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1/{name=projects/*/locations/*/identityMappingStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1/{name=projects/*/locations/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1/{name=projects/*}/operations")
                                .build())
                        .build())
                .build());
    HttpJsonCallSettings<GetSchemaRequest, Schema> getSchemaTransportSettings =
        HttpJsonCallSettings.<GetSchemaRequest, Schema>newBuilder()
            .setMethodDescriptor(getSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListSchemasRequest, ListSchemasResponse> listSchemasTransportSettings =
        HttpJsonCallSettings.<ListSchemasRequest, ListSchemasResponse>newBuilder()
            .setMethodDescriptor(listSchemasMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<CreateSchemaRequest, Operation> createSchemaTransportSettings =
        HttpJsonCallSettings.<CreateSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(createSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateSchemaRequest, Operation> updateSchemaTransportSettings =
        HttpJsonCallSettings.<UpdateSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(updateSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("schema.name", String.valueOf(request.getSchema().getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeleteSchemaRequest, Operation> deleteSchemaTransportSettings =
        HttpJsonCallSettings.<DeleteSchemaRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteSchemaMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    this.getSchemaCallable =
        callableFactory.createUnaryCallable(
            getSchemaTransportSettings, settings.getSchemaSettings(), clientContext);
    this.listSchemasCallable =
        callableFactory.createUnaryCallable(
            listSchemasTransportSettings, settings.listSchemasSettings(), clientContext);
    this.listSchemasPagedCallable =
        callableFactory.createPagedCallable(
            listSchemasTransportSettings, settings.listSchemasSettings(), clientContext);
    this.createSchemaCallable =
        callableFactory.createUnaryCallable(
            createSchemaTransportSettings, settings.createSchemaSettings(), clientContext);
    this.createSchemaOperationCallable =
        callableFactory.createOperationCallable(
            createSchemaTransportSettings,
            settings.createSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.updateSchemaCallable =
        callableFactory.createUnaryCallable(
            updateSchemaTransportSettings, settings.updateSchemaSettings(), clientContext);
    this.updateSchemaOperationCallable =
        callableFactory.createOperationCallable(
            updateSchemaTransportSettings,
            settings.updateSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.deleteSchemaCallable =
        callableFactory.createUnaryCallable(
            deleteSchemaTransportSettings, settings.deleteSchemaSettings(), clientContext);
    this.deleteSchemaOperationCallable =
        callableFactory.createOperationCallable(
            deleteSchemaTransportSettings,
            settings.deleteSchemaOperationSettings(),
            clientContext,
            httpJsonOperationsStub);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
/** Exposes the REST method descriptors for this stub (introspection / testing aid). */
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  List<ApiMethodDescriptor> descriptors = new ArrayList<>();
  descriptors.add(getSchemaMethodDescriptor);
  descriptors.add(listSchemasMethodDescriptor);
  descriptors.add(createSchemaMethodDescriptor);
  descriptors.add(updateSchemaMethodDescriptor);
  descriptors.add(deleteSchemaMethodDescriptor);
  return descriptors;
}

/** Stub handling the google.longrunning.Operations mixin over REST. */
public HttpJsonOperationsStub getHttpJsonOperationsStub() {
  return httpJsonOperationsStub;
}

@Override
public UnaryCallable<GetSchemaRequest, Schema> getSchemaCallable() {
  return getSchemaCallable;
}

@Override
public UnaryCallable<ListSchemasRequest, ListSchemasResponse> listSchemasCallable() {
  return listSchemasCallable;
}

@Override
public UnaryCallable<ListSchemasRequest, ListSchemasPagedResponse> listSchemasPagedCallable() {
  return listSchemasPagedCallable;
}

@Override
public UnaryCallable<CreateSchemaRequest, Operation> createSchemaCallable() {
  return createSchemaCallable;
}

@Override
public OperationCallable<CreateSchemaRequest, Schema, CreateSchemaMetadata>
    createSchemaOperationCallable() {
  return createSchemaOperationCallable;
}

@Override
public UnaryCallable<UpdateSchemaRequest, Operation> updateSchemaCallable() {
  return updateSchemaCallable;
}

@Override
public OperationCallable<UpdateSchemaRequest, Schema, UpdateSchemaMetadata>
    updateSchemaOperationCallable() {
  return updateSchemaOperationCallable;
}

@Override
public UnaryCallable<DeleteSchemaRequest, Operation> deleteSchemaCallable() {
  return deleteSchemaCallable;
}

@Override
public OperationCallable<DeleteSchemaRequest, Empty, DeleteSchemaMetadata>
    deleteSchemaOperationCallable() {
  return deleteSchemaOperationCallable;
}

/**
 * Closes the aggregated background resources. Runtime exceptions propagate as-is;
 * checked exceptions are wrapped in {@link IllegalStateException}.
 */
@Override
public final void close() {
  try {
    backgroundResources.close();
  } catch (RuntimeException e) {
    throw e;
  } catch (Exception e) {
    throw new IllegalStateException("Failed to close resource", e);
  }
}

// Lifecycle methods simply delegate to the aggregated background resources.
@Override
public void shutdown() {
  backgroundResources.shutdown();
}

@Override
public boolean isShutdown() {
  return backgroundResources.isShutdown();
}

@Override
public boolean isTerminated() {
  return backgroundResources.isTerminated();
}

@Override
public void shutdownNow() {
  backgroundResources.shutdownNow();
}

@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  return backgroundResources.awaitTermination(duration, unit);
}
}
oracle/graal
34,752
truffle/src/com.oracle.truffle.api.library.test/src/com/oracle/truffle/api/library/test/CachedLibraryTest.java
/*
 * Copyright (c) 2018, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * The Universal Permissive License (UPL), Version 1.0
 *
 * Subject to the condition set forth below, permission is hereby granted to any
 * person obtaining a copy of this software, associated documentation and/or
 * data (collectively the "Software"), free of charge and under any and all
 * copyright rights in the Software, and any and all patent rights owned or
 * freely licensable by each licensor hereunder covering either (i) the
 * unmodified Software as contributed to or provided by such licensor, or (ii)
 * the Larger Works (as defined below), to deal in both
 *
 * (a) the Software, and
 *
 * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
 * one is included with the Software each a "Larger Work" to which the Software
 * is contributed by such licensors),
 *
 * without restriction, including without limitation the rights to copy, create
 * derivative works of, display, perform, and distribute the Software and make,
 * use, sell, offer for sale, import, export, have made, and have sold the
 * Software and the Larger Work(s), and to sublicense the foregoing rights on
 * either these or other terms.
 *
 * This license is subject to the following condition:
 *
 * The above copyright notice and either this complete permission notice or at a
 * minimum a reference to the UPL must be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.oracle.truffle.api.library.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.junit.Test;

import com.oracle.truffle.api.Assumption;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.GenerateInline;
import com.oracle.truffle.api.dsl.GenerateUncached;
import com.oracle.truffle.api.dsl.Introspectable;
import com.oracle.truffle.api.dsl.NodeChild;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.dsl.UnsupportedSpecializationException;
import com.oracle.truffle.api.frame.Frame;
import com.oracle.truffle.api.frame.MaterializedFrame;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
import com.oracle.truffle.api.library.GenerateLibrary;
import com.oracle.truffle.api.library.Library;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.AssumptionNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.BoundaryFallthroughNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.CachedLibraryWithVarArgsExecuteNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.ConstantLimitNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.ConstantNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.DoubleNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.ExcludeNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.FromCached1NodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.FromCached2NodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.ReplaceCachedLibraryTestNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.SimpleDispatchedNodeGen;
import com.oracle.truffle.api.library.test.CachedLibraryTestFactory.SimpleNodeGen;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.test.AbstractLibraryTest;
import com.oracle.truffle.api.test.ExpectError;

/**
 * Tests for {@code @CachedLibrary} usage in Truffle DSL specializations: cached vs.
 * uncached dispatch, cache overflow, specialization replacement, and assumptions.
 */
@SuppressWarnings({"truffle-inlining", "truffle-neverdefault", "truffle-sharing"})
public class CachedLibraryTest extends AbstractLibraryTest {

    // Minimal library used as the dispatch target throughout these tests.
    @GenerateLibrary
    @SuppressWarnings("unused")
    public abstract static class SomethingLibrary extends Library {

        public boolean guard(Object receiver) {
            return true;
        }

        public String call(Object receiver) {
            return "default";
        }
    }

    // Receiver whose call() message reports whether it was reached through the
    // cached ("_cached") or uncached ("_uncached") path.
    @ExportLibrary(SomethingLibrary.class)
    public static class Something {

        private final String name;

        Something() {
            this(null);
        }

        Something(String name) {
            this.name = name;
        }

        @ExportMessage
        boolean accepts(@Cached(value = "this") Something cachedS) {
            // use identity caches to make it easier to overflow
            return this == cachedS;
        }

        @ExportMessage
        static final String call(Something s,
                        @Cached(value = "0", uncached = "1") int cached) {
            if (cached == 0) {
                if (s.name != null) {
                    return s.name + "_cached";
                } else {
                    return "cached";
                }
            } else {
                if (s.name != null) {
                    return s.name + "_uncached";
                } else {
                    return "uncached";
                }
            }
        }
    }

    @GenerateUncached
    @Introspectable
    public abstract static class SimpleNode extends Node {

        abstract String execute(Object receiver);

        @Specialization(limit = "2")
        public static String s0(Object receiver,
                        @CachedLibrary("receiver") SomethingLibrary lib1) {
            return lib1.call(receiver);
        }
    }

    @Test
    public void testSimple() {
        Something s1 = new Something();
        Something s2 = new Something();
        Something s3 = new Something();

        assertEquals("uncached", SimpleNodeGen.getUncached().execute(s1));
        assertEquals("uncached", SimpleNodeGen.getUncached().execute(s2));

        SimpleNode node = adopt(SimpleNodeGen.create());
        // limit = "2": two receivers fit in the cache; the third overflows to uncached.
        assertEquals("cached", node.execute(s1));
        assertEquals("cached", node.execute(s2));
        assertEquals("uncached", node.execute(s3));
        assertEquals("uncached", node.execute(s1));
        assertEquals("uncached", node.execute(s2));
    }

    @GenerateUncached
    public abstract static class ExcludeNode extends Node {

        abstract String execute(Object receiver);

        // Counts s0 invocations so the second call can force the rewrite.
        private static int call = 0;

        @Specialization(limit = "2", rewriteOn = ArithmeticException.class)
        public static String s0(Object receiver,
                        @CachedLibrary("receiver") SomethingLibrary lib1) throws ArithmeticException {
            if (call++ == 1) {
                throw new ArithmeticException();
            }
            return lib1.call(receiver) + "_s0";
        }

        @Specialization(replaces = "s0", limit = "2")
        public static String s1(Object receiver,
                        @CachedLibrary("receiver") SomethingLibrary lib1) throws ArithmeticException {
            return lib1.call(receiver) + "_s1";
        }
    }

    @Test
    public void testExclude() {
        Something s1 = new Something();
        Something s2 = new Something();
        Something s3 = new Something();

        assertEquals("uncached_s1", ExcludeNodeGen.getUncached().execute(s1));
        assertEquals("uncached_s1", ExcludeNodeGen.getUncached().execute(s2));
        assertEquals("uncached_s1", ExcludeNodeGen.getUncached().execute(s3));

        ExcludeNode.call = 0;
        ExcludeNode node = adopt(ExcludeNodeGen.create());
        // First call hits s0; the second throws in s0 and rewrites to s1.
        assertEquals("cached_s0", node.execute(s1));
        assertEquals("cached_s1", node.execute(s2));
        assertEquals("cached_s1", node.execute(s3));
        assertEquals("uncached_s1", node.execute(s1));
        assertEquals("uncached_s1", node.execute(s2));
    }

    @GenerateUncached
    public abstract static class AssumptionNode extends Node {

        abstract String execute(Object receiver);

        // Read once per s0 instantiation; tests swap it between calls.
        static Assumption a;

        @Specialization(limit = "2", assumptions = "a")
        public static String s0(Object receiver,
                        @CachedLibrary("receiver") SomethingLibrary lib1) throws ArithmeticException {
            return lib1.call(receiver) + "_s0";
        }

        @Specialization(replaces = "s0", limit = "2")
        public static String s1(Object receiver,
                        @CachedLibrary("receiver") SomethingLibrary lib1) throws ArithmeticException {
            return lib1.call(receiver) + "_s1";
        }
    }

    @Test
    public void testAssumption() {
        Something s1 = new Something();
        Something s2 = new Something();
        Something s3 = new Something();
        assertEquals("uncached_s1", AssumptionNodeGen.getUncached().execute(s1));
        assertEquals("uncached_s1", AssumptionNodeGen.getUncached().execute(s2));
        assertEquals("uncached_s1", AssumptionNodeGen.getUncached().execute(s3));

        // test with null assumption
        AssumptionNode.a = null;
        AssumptionNode node = adopt(AssumptionNodeGen.create());
        assertEquals("cached_s1", node.execute(s1));
        assertEquals("cached_s1", node.execute(s2));
        assertEquals("uncached_s1", node.execute(s3));
        assertEquals("uncached_s1", node.execute(s2));
        assertEquals("uncached_s1", node.execute(s1));

        node = adopt(AssumptionNodeGen.create());
        Assumption a0 = AssumptionNode.a = Truffle.getRuntime().createAssumption();
        assertEquals("cached_s0", node.execute(s1));
        AssumptionNode.a = Truffle.getRuntime().createAssumption();
        assertEquals("cached_s0", node.execute(s2));
        // Invalidating the first assumption drops that s0 instance; s1 takes over.
        a0.invalidate();
        AssumptionNode.a = Truffle.getRuntime().createAssumption();
        assertEquals("cached_s0", node.execute(s3));
        assertEquals("cached_s1", node.execute(s1));
        assertEquals("cached_s1", node.execute(s2));
        assertEquals("uncached_s1", node.execute(s3));
    }

    @GenerateUncached
    @SuppressWarnings("unused")
    @GenerateInline(false)
    public abstract static class ConstantNode extends Node {

        abstract String execute(Object receiver);

        @Specialization
        public static String s1(Object receiver,
                        @CachedLibrary("42") SomethingLibrary lib) {
            return lib.call(42);
        }
    }

    @Test
    public void testConstant() {
        assertEquals("default", ConstantNodeGen.getUncached().execute(42));
        ConstantNode node = adopt(ConstantNodeGen.create());
        assertEquals("default", node.execute(42));
    }

    /*
     * Test that if the library receiver is bound to a cached we don't actually need to insert an
     * accepts guard. So we will not trigger multiple instances (unless the specialization does so).
     */
    @GenerateUncached
    @SuppressWarnings("unused")
    public abstract static class FromCached1Node extends Node {

        abstract String execute(Object receiver);

        @Specialization(guards = "receiver == cachedReceiver", limit = "3")
        public static String s1(Object receiver,
                        @Cached("receiver") Object cachedReceiver,
                        @CachedLibrary("cachedReceiver") SomethingLibrary lib) {
            return lib.call(cachedReceiver);
        }
    }

    @Test
    public void testFromCached1() {
        Something s1 = new Something("s1");
        Something s2 = new Something("s2");
        FromCached1Node uncached = FromCached1NodeGen.getUncached();
        assertEquals("s1_uncached", uncached.execute(s1));
        assertEquals("s2_uncached", uncached.execute(s2));

        FromCached1Node cached = adopt(FromCached1NodeGen.create());
        assertEquals("s1_cached", cached.execute(s1));
        assertEquals("s2_cached", cached.execute(s2));
    }

    /*
     * Same as FromCached1Node but without identity cache.
*/ @GenerateUncached @SuppressWarnings("unused") public abstract static class FromCached2Node extends Node { abstract String execute(Object receiver); @Specialization public static String s1(Object receiver, @Cached("receiver") Object cachedReceiver, @CachedLibrary("cachedReceiver") SomethingLibrary lib) { return lib.call(cachedReceiver); } } @Test public void testFromCached2() { Something s1 = new Something("s1"); Something s2 = new Something("s2"); FromCached2Node uncached = FromCached2NodeGen.getUncached(); assertEquals("s1_uncached", uncached.execute(s1)); assertEquals("s2_uncached", uncached.execute(s2)); FromCached2Node cached = adopt(FromCached2NodeGen.create()); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s1_cached", cached.execute(s2)); } @GenerateUncached @SuppressWarnings("unused") public abstract static class DoubleNode extends Node { abstract String execute(Object a0, Object a1); @Specialization(limit = "4") public static String s1(Object a0, Object a1, @CachedLibrary("a0") SomethingLibrary lib1, @CachedLibrary("a1") SomethingLibrary lib2) { return lib1.call(a0) + "_" + lib2.call(a1); } } @Test public void testDouble() { Something s1 = new Something("s1"); Something s2 = new Something("s2"); Something s3 = new Something("s3"); DoubleNode uncached = adopt(DoubleNodeGen.getUncached()); assertEquals("s1_uncached_s1_uncached", uncached.execute(s1, s1)); assertEquals("s1_uncached_s2_uncached", uncached.execute(s1, s2)); assertEquals("s2_uncached_s1_uncached", uncached.execute(s2, s1)); assertEquals("s1_uncached_s3_uncached", uncached.execute(s1, s3)); assertEquals("s3_uncached_s1_uncached", uncached.execute(s3, s1)); DoubleNode cached = adopt(DoubleNodeGen.create()); assertEquals("s1_cached_s1_cached", cached.execute(s1, s1)); assertEquals("s1_cached_s1_cached", cached.execute(s1, s1)); assertEquals("s1_cached_s2_cached", cached.execute(s1, s2)); assertEquals("s2_cached_s1_cached", cached.execute(s2, s1)); 
assertEquals("s2_cached_s2_cached", cached.execute(s2, s2)); assertEquals("s1_cached_s1_cached", cached.execute(s1, s1)); assertEquals("s1_cached_s1_cached", cached.execute(s1, s1)); assertEquals("s1_cached_s2_cached", cached.execute(s1, s2)); assertEquals("s2_cached_s1_cached", cached.execute(s2, s1)); assertEquals("s2_cached_s2_cached", cached.execute(s2, s2)); assertEquals("s3_uncached_s1_uncached", cached.execute(s3, s1)); assertEquals("s1_uncached_s1_uncached", cached.execute(s1, s1)); assertEquals("s2_uncached_s1_uncached", cached.execute(s2, s1)); assertEquals("s1_uncached_s2_uncached", cached.execute(s1, s2)); assertEquals("s2_uncached_s2_uncached", cached.execute(s2, s2)); } @GenerateUncached @SuppressWarnings("unused") public abstract static class SimpleDispatchedNode extends Node { static int limit = 2; abstract String execute(Object a0); @Specialization public static String s1(Object a0, @CachedLibrary(limit = "limit") SomethingLibrary lib1) { return lib1.call(a0); } } @Test public void testDispatched() { SimpleDispatchedNode uncached; SimpleDispatchedNode cached; Something s1 = new Something("s1"); Something s2 = new Something("s2"); Something s3 = new Something("s3"); uncached = adopt(SimpleDispatchedNodeGen.getUncached()); assertEquals("s1_uncached", uncached.execute(s1)); assertEquals("s1_uncached", uncached.execute(s1)); assertEquals("s2_uncached", uncached.execute(s2)); assertEquals("s3_uncached", uncached.execute(s3)); SimpleDispatchedNode.limit = 0; cached = adopt(SimpleDispatchedNodeGen.create()); assertEquals("s1_uncached", cached.execute(s1)); assertEquals("s1_uncached", cached.execute(s1)); assertEquals("s2_uncached", cached.execute(s2)); assertEquals("s3_uncached", cached.execute(s3)); assertEquals("s1_uncached", cached.execute(s1)); SimpleDispatchedNode.limit = 1; cached = adopt(SimpleDispatchedNodeGen.create()); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s2_uncached", 
cached.execute(s2)); assertEquals("s3_uncached", cached.execute(s3)); assertEquals("s1_uncached", cached.execute(s1)); SimpleDispatchedNode.limit = 2; cached = adopt(SimpleDispatchedNodeGen.create()); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s2_cached", cached.execute(s2)); assertEquals("s3_uncached", cached.execute(s3)); assertEquals("s2_uncached", cached.execute(s2)); assertEquals("s1_uncached", cached.execute(s1)); SimpleDispatchedNode.limit = 3; cached = adopt(SimpleDispatchedNodeGen.create()); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s1_cached", cached.execute(s1)); assertEquals("s2_cached", cached.execute(s2)); assertEquals("s3_cached", cached.execute(s3)); } @SuppressWarnings("unused") public abstract static class ConstantLimitNode extends Node { static int limit = 2; abstract String execute(Object a0); @Specialization public static String s1(Object a0, @CachedLibrary(limit = "0") SomethingLibrary lib1) { return lib1.call(a0); } } @Test public void testZeroConstantLimit() { Something s1 = new Something("s1"); Something s2 = new Something("s2"); ConstantLimitNode cached = ConstantLimitNodeGen.create(); assertEquals("s1_uncached", cached.execute(s1)); assertEquals("s2_uncached", cached.execute(s2)); } @SuppressWarnings("unused") public abstract static class FallbackTest extends Node { static final String TEST_STRING = "test"; static int limit = 2; abstract String execute(Object a0); @Specialization(guards = "lib1.call(a0).equals(TEST_STRING)", limit = "3") public static String s1(Object a0, @CachedLibrary("a0") SomethingLibrary lib1) { return lib1.call(a0); } @Fallback public static String fallback(Object a0) { return "fallback"; } } @Test public void testBoundaryFallthrough() { BoundaryFallthroughNode node = adopt(BoundaryFallthroughNodeGen.create()); node.execute(1); node.execute(2); node.execute(3); node.execute(3); node.execute(5); try { node.execute(6); fail(); } catch 
(UnsupportedSpecializationException e) { } assertEquals(5, node.invocationCount); } @SuppressWarnings("unused") public abstract static class ReplaceCachedLibraryTest extends Node { static final String TEST_STRING = "test"; static int limit = 2; abstract String execute(Object a0); @Specialization(limit = "2") public static String s0(Object a0, @CachedLibrary("a0") SomethingLibrary lib1) { return "s0_" + lib1.call(a0); } @Specialization(replaces = "s0") public static String s1(Object a0) { return "s1"; } } @Test public void testReplace() { Something s1 = new Something("1"); Something s2 = new Something("2"); Something s3 = new Something("3"); ReplaceCachedLibraryTest node = adopt(ReplaceCachedLibraryTestNodeGen.create()); assertEquals("s0_1_cached", node.execute(s1)); assertEquals("s0_2_cached", node.execute(s2)); assertEquals("s1", node.execute(s3)); node.execute(2); node.execute(3); node.execute(3); node.execute(5); } @Test public void testCachedLibraryWithVarArgsExecute() { CachedLibraryWithVarArgsExecute node = adoptNode(CachedLibraryWithVarArgsExecuteNodeGen.create(new ArgumentNode[]{ new ArgumentNode(), new ArgumentNode(), new ArgumentNode() })).get(); // test execute with nodes assertEquals(3, node.execute()); assertEquals(2, node.execute(0)); assertEquals(1, node.execute(0, 0)); assertEquals(0, node.execute(0, 0, 0)); // test execute with varargs assertEquals(3, node.executeVarArgs(new Object[]{1, 1, 1})); assertEquals(2, node.executeVarArgs(0, new Object[]{1, 1})); assertEquals(1, node.executeVarArgs(0, 0, new Object[]{1})); assertEquals(0, node.executeVarArgs(0, 0, 0, new Object[]{})); // test with varargs and frame VirtualFrame frame = Truffle.getRuntime().createVirtualFrame(new Object[0], node.getRootNode().getFrameDescriptor()); assertEquals(3, node.executeWithFrame(frame, new Object[]{1, 1, 1})); assertEquals(2, node.executeWithFrame(frame, 0, new Object[]{1, 1})); assertEquals(1, node.executeWithFrame(frame, 0, 0, new Object[]{1})); assertEquals(0, 
node.executeWithFrame(frame, 0, 0, 0, new Object[]{})); } public static class ArgumentNode extends Node { public Object execute() { return 1; } } /* * Test for GR-27335. */ @NodeChild(value = "arguments", type = ArgumentNode[].class) abstract static class CachedLibraryWithVarArgsExecute extends Node { abstract Object executeWithFrame(VirtualFrame frame, Object... arguments); abstract Object executeWithFrame(VirtualFrame frame, Object arg0, Object... arguments); abstract Object executeWithFrame(VirtualFrame frame, Object arg0, Object arg1, Object... arguments); abstract Object executeWithFrame(VirtualFrame frame, Object arg0, Object arg1, Object arg2, Object... arguments); abstract Object executeWithFrame(VirtualFrame frame); abstract Object executeVarArgs(Object... arguments); abstract Object executeVarArgs(Object arg0, Object... arguments); abstract Object executeVarArgs(Object arg0, Object arg1, Object... arguments); abstract Object executeVarArgs(Object arg0, Object arg1, Object arg2, Object... arguments); abstract Object execute(); abstract Object execute(Object arg0); abstract Object execute(Object arg0, Object arg1); abstract Object execute(Object arg0, Object arg1, Object arg2); static int LIMIT = 0; /* * We don't use a constant limit to avoid optimizations in the DSL. 
*/ @Specialization(limit = "LIMIT") @SuppressWarnings("unused") protected static final int doExecute(Object arg0, Object arg1, Object arg2, @CachedLibrary("arg0") final InteropLibrary interop0, @CachedLibrary("arg1") final InteropLibrary interop1, @CachedLibrary("arg2") final InteropLibrary interop2) { try { return interop0.asInt(arg0) + interop1.asInt(arg1) + interop2.asInt(arg2); } catch (UnsupportedMessageException e) { throw CompilerDirectives.shouldNotReachHere(e); } } } @SuppressWarnings("unused") public abstract static class BoundaryFallthroughNode extends Node { abstract Object execute(Object arg); private int invocationCount = 0; @Specialization(guards = "arg == cachedArg", limit = "2") Object doNative(Object arg, @Cached("arg") Object cachedArg, @CachedLibrary("arg") InteropLibrary interop) { invocationCount++; return null; } protected static final Object initArg(Object arg, Object ctx) { return arg; } } public abstract static class TestBoundaryAndVirtualFrame1 extends Node { public abstract Object execute(VirtualFrame frame, Object arg); @SuppressWarnings("unused") @Specialization(limit = "3") String doDefault(VirtualFrame frame, Object arg, @CachedLibrary("arg") SomethingLibrary interop) { return null; } } public abstract static class TestBoundaryAndVirtualFrame2 extends Node { public abstract Object execute(VirtualFrame frame, Object arg); @SuppressWarnings("unused") @Specialization(limit = "3") String doDefault(Object arg, @CachedLibrary("arg") SomethingLibrary interop) { return null; } } public abstract static class TestBoundaryAndFrame extends Node { public abstract Object execute(Frame frame, Object arg); @SuppressWarnings("unused") @Specialization(limit = "3") String doDefault(Frame frame, Object arg, @CachedLibrary("arg") SomethingLibrary interop) { return null; } } public abstract static class TestBoundaryAndMaterializedFrame extends Node { public abstract Object execute(MaterializedFrame frame, Object arg); @SuppressWarnings("unused") 
@Specialization(limit = "3") String doDefault(MaterializedFrame frame, Object arg, @CachedLibrary("arg") SomethingLibrary interop) { return null; } } @GenerateLibrary @SuppressWarnings("unused") public abstract static class InExportsLibrary extends Library { public String m0(Object receiver) { return "m0_default"; } public String m1(Object receiver) { return "m1_default"; } } @ExportLibrary(InExportsLibrary.class) final class InExportsSameLibraryObject { @ExportMessage String m0(@CachedLibrary("this") InExportsLibrary thisLibrary) { return thisLibrary.m1(this); } } @ExportLibrary(InExportsLibrary.class) final class InExportsDifferentLibraryObject { @ExportMessage String m0(@CachedLibrary("this") SomethingLibrary otherLibrary) { return otherLibrary.call(this); } } @ExportLibrary(InExportsLibrary.class) final class InExportsErrorObject1 { @ExpectError("The limit expression has no effect. Multiple specialization instantiations are impossible for this specialization.") @ExportMessage(limit = "5") String m0(@CachedLibrary("this") InExportsLibrary thisLibrary) { return thisLibrary.m1(this); } } @GenerateUncached public abstract static class CachedLibraryErrorNode1 extends Node { abstract String execute(Object receiver); @ExpectError("The limit attribute must be specified if @CachedLibrary is used with a dynamic parameter. E.g. 
add limit=\"3\" to resolve this.") @Specialization public static String s1(Object receiver, @CachedLibrary("receiver") SomethingLibrary lib2) { return lib2.call(receiver); } } public abstract static class CachedLibraryErrorNode2 extends Node { abstract String execute(Object receiver); @Specialization(limit = "3") public static String s1(Object receiver, @CachedLibrary("receiver") SomethingLibrary lib2) { return lib2.call(receiver); } } public abstract static class CachedLibraryErrorNode3 extends Node { abstract String execute(Object receiver); @Specialization public static String s1(Object receiver, @ExpectError("A specialized value expression or limit must be specified for @CachedLibrary. " + "Use @CachedLibrary(\"value\") for a specialized " + "or @CachedLibrary(limit=\"\") for a dispatched library. " + "See the javadoc of @CachedLibrary for further details.") // @CachedLibrary SomethingLibrary lib2) { return lib2.call(receiver); } } public abstract static class CachedLibraryErrorNode4 extends Node { abstract String execute(Object receiver); @Specialization public static String s1(Object receiver, @ExpectError("The use of @Cached is not supported for libraries. Use @CachedLibrary instead.") // @Cached SomethingLibrary lib2) { return lib2.call(receiver); } } public abstract static class CachedLibraryErrorNode5 extends Node { abstract String execute(Object receiver); @Specialization public static String s1(Object receiver, @ExpectError("Error parsing expression 'foobar': foobar cannot be resolved.") // @CachedLibrary(limit = "foobar") SomethingLibrary lib2) { return lib2.call(receiver); } } public abstract static class CachedLibraryErrorNode6 extends Node { abstract String execute(Object receiver); @Specialization public static String s1(Object receiver, @ExpectError("The limit and specialized value expression cannot be specified at the same time. 
They are mutually exclusive.") // @CachedLibrary(value = "receiver", limit = "2") SomethingLibrary lib2) { return lib2.call(receiver); } } static class ExplicitReceiver { } @GenerateLibrary(receiverType = ExplicitReceiver.class) @SuppressWarnings("unused") public abstract static class ExplicitReceiverLibrary extends Library { public String call(Object receiver) { return "default"; } } @GenerateUncached public abstract static class SimpleExplicitReceiverNode extends Node { abstract String execute(Object receiver); @Specialization(limit = "2") public static String s0(Object receiver, @CachedLibrary("receiver") ExplicitReceiverLibrary lib1) { return lib1.call(receiver); } } /* * Test that two abstract execute methods generate non-duplicate methods for @CachedLibrary. */ @SuppressWarnings("unused") abstract static class TestDuplicateBoundaryMethod extends Node { abstract Object execute(Object value); abstract Object executeWith(Object value); @Specialization(limit = "5") Object doSomething(Object value, @CachedLibrary("value") SomethingLibrary interop) { return null; } } /* * Test that testNode is not sharable (does not produce a warning) */ public abstract static class SharedLibraryTestNode extends Node { abstract String execute(Object receiver); @Specialization(limit = "2") public static String s0(Object receiver, @CachedLibrary("receiver") SomethingLibrary lib1, @SuppressWarnings("unused") @Cached SimpleNode testNode) { return lib1.call(receiver); } } /* * Test that dispatched libraries in guards don't require an outer limit. */ public abstract static class DispatchedLibrariesSingleInstance extends Node { abstract String execute(Object receiver); @ExpectError("The limit expression has no effect. 
Multiple specialization instantiations are impossible for this specialization.") @Specialization(guards = "lib1.guard(receiver)", limit = "3") public static String s0(Object receiver, @CachedLibrary(limit = "2") SomethingLibrary lib1) { return lib1.call(receiver); } } /* * Test that a library with a message called execute works. This can easily trigger the code * generator as it often looks for methods with the execute prefix. */ @GenerateLibrary @SuppressWarnings("unused") public abstract static class LibraryWithExecute extends Library { public String execute(Object receiver) { return "default"; } } @ExportLibrary(LibraryWithExecute.class) public abstract static class LibraryThatUsesExecuteMethod { @ExportMessage public String execute(@CachedLibrary("this") SomethingLibrary somethings) { return somethings.call(this); } } @ExportLibrary(LibraryWithExecute.class) public abstract static class LibraryThatUsesExecuteNode { @ExportMessage static class Execute { @Specialization static String doDefault(LibraryThatUsesExecuteNode receiver, @CachedLibrary("receiver") SomethingLibrary somethings) { return somethings.call(receiver); } } } /* * This test was crashing in GR-24920 as the uncached library lookup was accidently using lib1 * to match lib2 for the generated uncached specializations. */ abstract static class DispatchedAndExpressionLibraryNode extends Node { public abstract int execute(Object arg) throws UnsupportedMessageException; @Specialization(limit = "2") static int doBoxed(Object arg, @CachedLibrary(limit = "2") InteropLibrary lib1, @SuppressWarnings("unused") @CachedLibrary("arg") InteropLibrary lib2) throws UnsupportedMessageException { return lib1.asInt(arg); } } }
apache/commons-geometry
35,271
commons-geometry-euclidean/src/test/java/org/apache/commons/geometry/euclidean/oned/IntervalTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.geometry.euclidean.oned; import org.apache.commons.geometry.core.RegionLocation; import org.apache.commons.geometry.core.partitioning.Split; import org.apache.commons.geometry.core.partitioning.SplitLocation; import org.apache.commons.geometry.euclidean.EuclideanTestUtils; import org.apache.commons.numbers.core.Precision; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; class IntervalTest { private static final double TEST_EPS = 1e-15; private static final Precision.DoubleEquivalence TEST_PRECISION = Precision.doubleEquivalenceOfEpsilon(TEST_EPS); @Test void testOf_doubles() { // act/assert checkInterval(Interval.of(0, 0, TEST_PRECISION), 0, 0); checkInterval(Interval.of(1, 2, TEST_PRECISION), 1, 2); checkInterval(Interval.of(2, 1, TEST_PRECISION), 1, 2); checkInterval(Interval.of(-2, -1, TEST_PRECISION), -2, -1); checkInterval(Interval.of(-1, -2, TEST_PRECISION), -2, -1); checkInterval(Interval.of(1, Double.POSITIVE_INFINITY, TEST_PRECISION), 1, Double.POSITIVE_INFINITY); checkInterval(Interval.of(Double.POSITIVE_INFINITY, 1, TEST_PRECISION), 1, Double.POSITIVE_INFINITY); checkInterval(Interval.of(Double.NEGATIVE_INFINITY, 1, TEST_PRECISION), 
Double.NEGATIVE_INFINITY, 1); checkInterval(Interval.of(1, Double.NEGATIVE_INFINITY, TEST_PRECISION), Double.NEGATIVE_INFINITY, 1); checkInterval(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, TEST_PRECISION), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); checkInterval(Interval.of(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, TEST_PRECISION), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); } @Test void testOf_doubles_invalidIntervals() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(1, Double.NaN, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Double.NaN, 1, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Double.NaN, Double.NaN, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, TEST_PRECISION)); } @Test void testOf_points() { // act/assert checkInterval(Interval.of(Vector1D.of(1), Vector1D.of(2), TEST_PRECISION), 1, 2); checkInterval(Interval.of(Vector1D.of(Double.POSITIVE_INFINITY), Vector1D.of(Double.NEGATIVE_INFINITY), TEST_PRECISION), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); } @Test void testOf_points_invalidIntervals() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Vector1D.of(1), Vector1D.of(Double.NaN), TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of(Vector1D.of(Double.POSITIVE_INFINITY), Vector1D.of(Double.POSITIVE_INFINITY), TEST_PRECISION)); } @Test void testOf_hyperplanes() { // act/assert Assertions.assertSame(Interval.full(), Interval.of(null, null)); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(1, true, TEST_PRECISION), 
OrientedPoints.fromLocationAndDirection(1, false, TEST_PRECISION)), 1, 1); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(1, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(1, true, TEST_PRECISION)), 1, 1); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(-2, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(5, true, TEST_PRECISION)), -2, 5); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(5, true, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(-2, false, TEST_PRECISION)), -2, 5); checkInterval(Interval.of( null, OrientedPoints.fromLocationAndDirection(5, true, TEST_PRECISION)), Double.NEGATIVE_INFINITY, 5); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(5, true, TEST_PRECISION), null), Double.NEGATIVE_INFINITY, 5); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(Double.NEGATIVE_INFINITY, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(5, true, TEST_PRECISION)), Double.NEGATIVE_INFINITY, 5); checkInterval(Interval.of( null, OrientedPoints.fromLocationAndDirection(5, false, TEST_PRECISION)), 5, Double.POSITIVE_INFINITY); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(5, false, TEST_PRECISION), null), 5, Double.POSITIVE_INFINITY); checkInterval(Interval.of( OrientedPoints.fromLocationAndDirection(Double.POSITIVE_INFINITY, true, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(5, false, TEST_PRECISION)), 5, Double.POSITIVE_INFINITY); } @Test void testOf_hyperplanes_invalidArgs() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(1, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(1, false, TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(2, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(1, true, 
TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(Double.POSITIVE_INFINITY, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(Double.POSITIVE_INFINITY, true, TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(Double.NaN, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(1, true, TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(1, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(Double.NaN, true, TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( OrientedPoints.fromLocationAndDirection(Double.NaN, false, TEST_PRECISION), OrientedPoints.fromLocationAndDirection(Double.NaN, true, TEST_PRECISION))); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.of( null, OrientedPoints.fromLocationAndDirection(Double.NaN, true, TEST_PRECISION))); } @Test void testPoint() { // act/assert checkInterval(Interval.point(0, TEST_PRECISION), 0, 0); checkInterval(Interval.point(1, TEST_PRECISION), 1, 1); checkInterval(Interval.point(-1, TEST_PRECISION), -1, -1); } @Test void testPoint_invalidArgs() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.point(Double.NEGATIVE_INFINITY, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.point(Double.POSITIVE_INFINITY, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.point(Double.NaN, TEST_PRECISION)); } @Test void testMin() { // act/assert checkInterval(Interval.min(Double.NEGATIVE_INFINITY, TEST_PRECISION), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); checkInterval(Interval.min(0, TEST_PRECISION), 0, Double.POSITIVE_INFINITY); checkInterval(Interval.min(1, TEST_PRECISION), 
1, Double.POSITIVE_INFINITY); checkInterval(Interval.min(-1, TEST_PRECISION), -1, Double.POSITIVE_INFINITY); } @Test void testMin_invalidArgs() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.min(Double.POSITIVE_INFINITY, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.min(Double.NaN, TEST_PRECISION)); } @Test void testMax() { // act/assert checkInterval(Interval.max(Double.POSITIVE_INFINITY, TEST_PRECISION), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); checkInterval(Interval.max(0, TEST_PRECISION), Double.NEGATIVE_INFINITY, 0); checkInterval(Interval.max(1, TEST_PRECISION), Double.NEGATIVE_INFINITY, 1); checkInterval(Interval.max(-1, TEST_PRECISION), Double.NEGATIVE_INFINITY, -1); } @Test void testMax_invalidArgs() { // act/assert Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.max(Double.NEGATIVE_INFINITY, TEST_PRECISION)); Assertions.assertThrows(IllegalArgumentException.class, () -> Interval.max(Double.NaN, TEST_PRECISION)); } @Test void testIsInfinite() { // act/assert Assertions.assertFalse(Interval.of(1, 2, TEST_PRECISION).isInfinite()); Assertions.assertTrue(Interval.of(Double.NEGATIVE_INFINITY, 2, TEST_PRECISION).isInfinite()); Assertions.assertTrue(Interval.of(2, Double.POSITIVE_INFINITY, TEST_PRECISION).isInfinite()); Assertions.assertTrue(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, TEST_PRECISION).isInfinite()); } @Test void testIsFinite() { // act/assert Assertions.assertTrue(Interval.of(1, 2, TEST_PRECISION).isFinite()); Assertions.assertFalse(Interval.of(Double.NEGATIVE_INFINITY, 2, TEST_PRECISION).isFinite()); Assertions.assertFalse(Interval.of(2, Double.POSITIVE_INFINITY, TEST_PRECISION).isFinite()); Assertions.assertFalse(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, TEST_PRECISION).isFinite()); } @Test void testClassify_finite() { // arrange final Precision.DoubleEquivalence precision = 
Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(-1, 1, precision); // act/assert checkClassify(interval, RegionLocation.OUTSIDE, Double.NEGATIVE_INFINITY, -2, -1.1, 1.1, 2, Double.POSITIVE_INFINITY); checkClassify(interval, RegionLocation.BOUNDARY, -1.001, -1, -0.999, 0.999, 1, 1.001); checkClassify(interval, RegionLocation.INSIDE, -0.9, 0, 0.9); checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN); } @Test void testClassify_singlePoint() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(1, 1, precision); // act/assert checkClassify(interval, RegionLocation.OUTSIDE, Double.NEGATIVE_INFINITY, 0, 0.9, 1.1, 2, Double.POSITIVE_INFINITY); checkClassify(interval, RegionLocation.BOUNDARY, 0.999, 1, 1.0001); checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN); } @Test void testClassify_maxInfinite() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(-1, Double.POSITIVE_INFINITY, precision); // act/assert checkClassify(interval, RegionLocation.OUTSIDE, Double.NEGATIVE_INFINITY, -2, -1.1); checkClassify(interval, RegionLocation.BOUNDARY, -1.001, -1, -0.999); checkClassify(interval, RegionLocation.INSIDE, -0.9, 0, 1.0, Double.POSITIVE_INFINITY); checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN); } @Test void testClassify_minInfinite() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(Double.NEGATIVE_INFINITY, 1, precision); // act/assert checkClassify(interval, RegionLocation.INSIDE, Double.NEGATIVE_INFINITY, 0, 0.9); checkClassify(interval, RegionLocation.BOUNDARY, 0.999, 1, 1.001); checkClassify(interval, RegionLocation.OUTSIDE, 1.1, 2, Double.POSITIVE_INFINITY); checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN); } @Test void 
testClassify_minMaxInfinite() { // fully infinite interval: every real (and +/-inf) is INSIDE
        // arrange
        final Precision.DoubleEquivalence precision =
                Precision.doubleEquivalenceOfEpsilon(1e-2);
        final Interval interval = Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, precision);

        // act/assert
        checkClassify(interval, RegionLocation.INSIDE,
                Double.NEGATIVE_INFINITY, -1, 0, 1, Double.POSITIVE_INFINITY);
        // NaN is still outside even of the full interval
        checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN);
    }

    /** contains() is true for interior AND boundary points, false elsewhere and for NaN. */
    @Test
    void testContains_finite() {
        // arrange
        final Precision.DoubleEquivalence precision =
                Precision.doubleEquivalenceOfEpsilon(1e-2);
        final Interval interval = Interval.of(-1, 1, precision);

        // act/assert
        checkContains(interval, true, -1.001, -1, -0.999, 0.999, 1, 1.001, -0.9, 0, 0.9);
        checkContains(interval, false,
                Double.NEGATIVE_INFINITY, -2, -1.1, 1.1, 2, Double.POSITIVE_INFINITY);
        checkContains(interval, false, Double.NaN);
    }

    /** Only the doubly-infinite interval is the full space. */
    @Test
    void testIsFull() {
        // act/assert
        Assertions.assertFalse(Interval.of(1, 1, TEST_PRECISION).isFull());
        Assertions.assertFalse(Interval.of(-2, 2, TEST_PRECISION).isFull());
        Assertions.assertFalse(Interval.of(1, Double.POSITIVE_INFINITY, TEST_PRECISION).isFull());
        Assertions.assertFalse(Interval.of(Double.NEGATIVE_INFINITY, 1, TEST_PRECISION).isFull());
        Assertions.assertTrue(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                TEST_PRECISION).isFull());
    }

    /** Size is max - min; note of() accepts bounds in either order (see the (2, -3) case). */
    @Test
    void testGetSize() {
        // act/assert
        Assertions.assertEquals(0, Interval.of(1, 1, TEST_PRECISION).getSize(), TEST_EPS);
        Assertions.assertEquals(4, Interval.of(-2, 2, TEST_PRECISION).getSize(), TEST_EPS);
        Assertions.assertEquals(5, Interval.of(2, -3, TEST_PRECISION).getSize(), TEST_EPS);

        Assertions.assertEquals(Double.POSITIVE_INFINITY,
                Interval.of(1, Double.POSITIVE_INFINITY, TEST_PRECISION).getSize(), TEST_EPS);
        Assertions.assertEquals(Double.POSITIVE_INFINITY,
                Interval.of(Double.NEGATIVE_INFINITY, 1, TEST_PRECISION).getSize(), TEST_EPS);
        Assertions.assertEquals(Double.POSITIVE_INFINITY,
                Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
TEST_PRECISION).getSize(), TEST_EPS); // tail of testGetSize: doubly-infinite interval has infinite size
    }

    /** 1D boundaries are points, so the boundary "size" (measure) is always zero. */
    @Test
    void testGetBoundarySize() {
        // act/assert
        Assertions.assertEquals(0, Interval.of(1, 1, TEST_PRECISION).getBoundarySize(), TEST_EPS);
        Assertions.assertEquals(0, Interval.of(-2, 5, TEST_PRECISION).getBoundarySize(), TEST_EPS);
        Assertions.assertEquals(0, Interval.full().getBoundarySize(), TEST_EPS);
    }

    /** Centroid is the midpoint for finite intervals and null when any bound is infinite. */
    @Test
    void testGetCentroid() {
        // act/assert
        EuclideanTestUtils.assertCoordinatesEqual(Vector1D.ZERO,
                Interval.of(-1, 1, TEST_PRECISION).getCentroid(), TEST_EPS);
        EuclideanTestUtils.assertCoordinatesEqual(Vector1D.of(10),
                Interval.of(10, 10, TEST_PRECISION).getCentroid(), TEST_EPS);
        EuclideanTestUtils.assertCoordinatesEqual(Vector1D.of(2),
                Interval.of(1, 3, TEST_PRECISION).getCentroid(), TEST_EPS);
        EuclideanTestUtils.assertCoordinatesEqual(Vector1D.of(-1),
                Interval.of(-2, 0, TEST_PRECISION).getCentroid(), TEST_EPS);

        Assertions.assertNull(Interval.of(1, Double.POSITIVE_INFINITY, TEST_PRECISION).getCentroid());
        Assertions.assertNull(Interval.of(Double.NEGATIVE_INFINITY, 1, TEST_PRECISION).getCentroid());
        Assertions.assertNull(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                TEST_PRECISION).getCentroid());
    }

    /** toTree() of a finite interval builds a 5-node BSP tree that classifies like the interval. */
    @Test
    void checkToTree_finite() {
        // arrange
        final Precision.DoubleEquivalence precision =
                Precision.doubleEquivalenceOfEpsilon(1e-2);
        final Interval interval = Interval.of(-1, 1, precision);

        // act
        final RegionBSPTree1D tree = interval.toTree();

        // assert
        Assertions.assertEquals(5, tree.count());

        checkClassify(tree, RegionLocation.OUTSIDE,
                Double.NEGATIVE_INFINITY, -2, -1.1, 1.1, 2, Double.POSITIVE_INFINITY);
        checkClassify(tree, RegionLocation.BOUNDARY,
                -1.001, -1, -0.999, 0.999, 1, 1.001);
        checkClassify(tree, RegionLocation.INSIDE, -0.9, 0, 0.9);
        checkClassify(tree, RegionLocation.OUTSIDE, Double.NaN);
    }

    /** toTree() of a degenerate point interval still yields two cut nodes (5 total). */
    @Test
    void checkToTree_singlePoint() {
        // arrange
        final Precision.DoubleEquivalence precision =
                Precision.doubleEquivalenceOfEpsilon(1e-2);
        final Interval interval = Interval.of(1, 1, precision);

        // act
final RegionBSPTree1D tree = interval.toTree(); // assert Assertions.assertEquals(5, tree.count()); checkClassify(tree, RegionLocation.OUTSIDE, Double.NEGATIVE_INFINITY, 0, 0.9, 1.1, 2, Double.POSITIVE_INFINITY); checkClassify(tree, RegionLocation.BOUNDARY, 0.999, 1, 1.0001); checkClassify(tree, RegionLocation.OUTSIDE, Double.NaN); } @Test void checkToTree_maxInfinite() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(-1, Double.POSITIVE_INFINITY, precision); // act final RegionBSPTree1D tree = interval.toTree(); // assert Assertions.assertEquals(3, tree.count()); checkClassify(tree, RegionLocation.OUTSIDE, Double.NEGATIVE_INFINITY, -2, -1.1); checkClassify(tree, RegionLocation.BOUNDARY, -1.001, -1, -0.999); checkClassify(tree, RegionLocation.INSIDE, -0.9, 0, 1.0, Double.POSITIVE_INFINITY); checkClassify(interval, RegionLocation.OUTSIDE, Double.NaN); } @Test void checkToTree_minInfinite() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(Double.NEGATIVE_INFINITY, 1, precision); // act final RegionBSPTree1D tree = interval.toTree(); // assert Assertions.assertEquals(3, tree.count()); checkClassify(tree, RegionLocation.INSIDE, Double.NEGATIVE_INFINITY, 0, 0.9); checkClassify(tree, RegionLocation.BOUNDARY, 0.999, 1, 1.001); checkClassify(tree, RegionLocation.OUTSIDE, 1.1, 2, Double.POSITIVE_INFINITY); checkClassify(tree, RegionLocation.OUTSIDE, Double.NaN); } @Test void checkToTree_minMaxInfinite() { // arrange final Precision.DoubleEquivalence precision = Precision.doubleEquivalenceOfEpsilon(1e-2); final Interval interval = Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, precision); // act final RegionBSPTree1D tree = interval.toTree(); // assert Assertions.assertEquals(1, tree.count()); checkClassify(tree, RegionLocation.INSIDE, Double.NEGATIVE_INFINITY, -1, 0, 1, 
Double.POSITIVE_INFINITY); // tail of checkToTree_minMaxInfinite
        checkClassify(tree, RegionLocation.OUTSIDE, Double.NaN);
    }

    /** The full interval has no boundary, so projection always returns null. */
    @Test
    void testProjectToBoundary_full() {
        // arrange
        final Interval full = Interval.full();

        // act/assert
        Assertions.assertNull(full.project(Vector1D.of(Double.NEGATIVE_INFINITY)));
        Assertions.assertNull(full.project(Vector1D.of(0)));
        Assertions.assertNull(full.project(Vector1D.of(Double.POSITIVE_INFINITY)));
    }

    /** A point interval projects every input onto its single boundary location. */
    @Test
    void testProjectToBoundary_singlePoint() {
        // arrange
        final Interval interval = Interval.point(1, TEST_PRECISION);

        // act/assert
        checkBoundaryProjection(interval, -1, 1);
        checkBoundaryProjection(interval, 0, 1);
        checkBoundaryProjection(interval, 1, 1);
        checkBoundaryProjection(interval, 2, 1);
        checkBoundaryProjection(interval, 3, 1);

        checkBoundaryProjection(interval, Double.NEGATIVE_INFINITY, 1);
        checkBoundaryProjection(interval, Double.POSITIVE_INFINITY, 1);
    }

    /** Projection picks the nearer of the two boundaries (2.0 ties to the min boundary here). */
    @Test
    void testProjectToBoundary_closedInterval() {
        // arrange
        final Interval interval = Interval.of(1, 3, TEST_PRECISION);

        // act/assert
        checkBoundaryProjection(interval, -1, 1);
        checkBoundaryProjection(interval, 0, 1);
        checkBoundaryProjection(interval, 1, 1);
        checkBoundaryProjection(interval, 1.9, 1);

        checkBoundaryProjection(interval, 2, 1);

        checkBoundaryProjection(interval, 2.1, 3);
        checkBoundaryProjection(interval, 3, 3);
        checkBoundaryProjection(interval, 4, 3);
        checkBoundaryProjection(interval, 5, 3);

        checkBoundaryProjection(interval, Double.NEGATIVE_INFINITY, 1);
        checkBoundaryProjection(interval, Double.POSITIVE_INFINITY, 3);
    }

    /** With only a max boundary at 1, every input projects to 1. */
    @Test
    void testProjectToBoundary_noMinBoundary() {
        // arrange
        final Interval interval = Interval.of(Double.NEGATIVE_INFINITY, 1, TEST_PRECISION);

        // act/assert
        checkBoundaryProjection(interval, -1, 1);
        checkBoundaryProjection(interval, 0, 1);
        checkBoundaryProjection(interval, 1, 1);
        checkBoundaryProjection(interval, 2, 1);
        checkBoundaryProjection(interval, 3, 1);

        checkBoundaryProjection(interval, Double.NEGATIVE_INFINITY, 1);
        checkBoundaryProjection(interval,
Double.POSITIVE_INFINITY, 1); // tail of testProjectToBoundary_noMinBoundary
    }

    /** With only a min boundary at 1, every input projects to 1. */
    @Test
    void testProjectToBoundary_noMaxBoundary() {
        // arrange
        final Interval interval = Interval.of(1, Double.POSITIVE_INFINITY, TEST_PRECISION);

        // act/assert
        checkBoundaryProjection(interval, -1, 1);
        checkBoundaryProjection(interval, 0, 1);
        checkBoundaryProjection(interval, 1, 1);
        checkBoundaryProjection(interval, 2, 1);
        checkBoundaryProjection(interval, 3, 1);

        checkBoundaryProjection(interval, Double.NEGATIVE_INFINITY, 1);
        checkBoundaryProjection(interval, Double.POSITIVE_INFINITY, 1);
    }

    /** A positive scale transform scales both bounds and preserves infinities. */
    @Test
    void testTransform() {
        // arrange
        final AffineTransformMatrix1D transform = AffineTransformMatrix1D.createScale(2);

        // act/assert
        checkInterval(Interval.of(-1, 2, TEST_PRECISION).transform(transform), -2, 4);
        checkInterval(Interval.of(Double.NEGATIVE_INFINITY, 2, TEST_PRECISION).transform(transform),
                Double.NEGATIVE_INFINITY, 4);
        checkInterval(Interval.of(-1, Double.POSITIVE_INFINITY, TEST_PRECISION).transform(transform),
                -2, Double.POSITIVE_INFINITY);
        checkInterval(Interval.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                TEST_PRECISION).transform(transform), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }

    /** A negating transform swaps which endpoint is min and which is max. */
    @Test
    void testTransform_reflection() {
        // arrange
        final AffineTransformMatrix1D transform = AffineTransformMatrix1D.createScale(-1);

        // act/assert
        checkInterval(Interval.of(-1, 2, TEST_PRECISION).transform(transform), -2, 1);
        checkInterval(Interval.of(Double.NEGATIVE_INFINITY, 2, TEST_PRECISION).transform(transform),
                -2, Double.POSITIVE_INFINITY);
        checkInterval(Interval.of(-1, Double.POSITIVE_INFINITY, TEST_PRECISION).transform(transform),
                Double.NEGATIVE_INFINITY, 1);
    }

    /** Splitting the full interval with a +x-facing point cuts it into two half-lines. */
    @Test
    void testSplit_full_positiveFacingSplitter() {
        // arrange
        final Interval interval = Interval.full();
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.BOTH, split.getLocation());
checkInterval(split.getMinus(), Double.NEGATIVE_INFINITY, 1); // tail of testSplit_full_positiveFacingSplitter
        checkInterval(split.getPlus(), 1, Double.POSITIVE_INFINITY);
    }

    /** Reversing the splitter direction swaps which side is minus and which is plus. */
    @Test
    void testSplit_full_negativeFacingSplitter() {
        // arrange
        final Interval interval = Interval.full();
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), false, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.BOTH, split.getLocation());
        checkInterval(split.getMinus(), 1, Double.POSITIVE_INFINITY);
        checkInterval(split.getPlus(), Double.NEGATIVE_INFINITY, 1);
    }

    /** Splitting a half-line through its interior yields two pieces. */
    @Test
    void testSplit_halfSpace_positiveFacingSplitter() {
        // arrange
        final Interval interval = Interval.min(-1, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.BOTH, split.getLocation());
        checkInterval(split.getMinus(), -1, 1);
        checkInterval(split.getPlus(), 1, Double.POSITIVE_INFINITY);
    }

    /** Same split, negative-facing splitter: minus/plus sides swap. */
    @Test
    void testSplit_halfSpace_negativeFacingSplitter() {
        // arrange
        final Interval interval = Interval.min(-1, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), false, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.BOTH, split.getLocation());
        checkInterval(split.getMinus(), 1, Double.POSITIVE_INFINITY);
        checkInterval(split.getPlus(), -1, 1);
    }

    /** A splitter entirely below the interval leaves the whole interval on the plus side. */
    @Test
    void testSplit_splitterBelowInterval() {
        // arrange
        final Interval interval = Interval.of(5, 10, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.PLUS, split.getLocation());
        // no copy is made when the interval lies entirely on one side
        Assertions.assertSame(interval, split.getPlus());
    }

    @Test
    void
testSplit_splitterOnMinBoundary() { // splitter exactly on the min bound, facing away: all on minus side
        // arrange
        final Interval interval = Interval.of(5, 10, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(5), false, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.MINUS, split.getLocation());
        Assertions.assertSame(interval, split.getMinus());
    }

    /** A splitter entirely above the interval leaves the whole interval on the minus side. */
    @Test
    void testSplit_splitterAboveInterval() {
        // arrange
        final Interval interval = Interval.of(5, 10, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(11), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.MINUS, split.getLocation());
        Assertions.assertSame(interval, split.getMinus());
    }

    /** Splitter on the max bound, facing away: all on plus side. */
    @Test
    void testSplit_splitterOnMaxBoundary() {
        // arrange
        final Interval interval = Interval.of(5, 10, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(10), false, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.PLUS, split.getLocation());
        Assertions.assertSame(interval, split.getPlus());
    }

    /** A point interval above a negative-facing splitter lies entirely on the minus side. */
    @Test
    void testSplit_point_minusOnly() {
        // arrange
        final Interval interval = Interval.point(2, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), false, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.MINUS, split.getLocation());
        checkInterval(split.getMinus(), 2, 2);
        Assertions.assertNull(split.getPlus());
    }

    /** A point interval above a positive-facing splitter lies entirely on the plus side. */
    @Test
    void testSplit_point_plusOnly() {
        // arrange
        final Interval interval = Interval.point(2, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
Assertions.assertEquals(SplitLocation.PLUS, split.getLocation()); // tail of testSplit_point_plusOnly
        Assertions.assertNull(split.getMinus());
        checkInterval(split.getPlus(), 2, 2);
    }

    /** A splitter coincident with a point interval produces no piece on either side. */
    @Test
    void testSplit_point_onPoint() {
        // arrange
        final Interval interval = Interval.point(1, TEST_PRECISION);
        final OrientedPoint splitter = OrientedPoints.fromPointAndDirection(
                Vector1D.of(1), true, TEST_PRECISION);

        // act
        final Split<Interval> split = interval.split(splitter);

        // assert
        Assertions.assertEquals(SplitLocation.NEITHER, split.getLocation());
        Assertions.assertNull(split.getMinus());
        Assertions.assertNull(split.getPlus());
    }

    /** toString() shows the normalized min/max (of(2, 1) normalizes to [1, 2]). */
    @Test
    void testToString() {
        // arrange
        final Interval interval = Interval.of(2, 1, TEST_PRECISION);

        // act
        final String str = interval.toString();

        // assert
        Assertions.assertTrue(str.contains("Interval"));
        Assertions.assertTrue(str.contains("min= 1.0"));
        Assertions.assertTrue(str.contains("max= 2.0"));
    }

    /** full() is the unbounded, non-empty interval containing the infinities. */
    @Test
    void testFull() {
        // act
        final Interval full = Interval.full();

        // assert
        Assertions.assertTrue(full.isFull());
        Assertions.assertFalse(full.isEmpty());
        Assertions.assertFalse(full.hasMinBoundary());
        Assertions.assertFalse(full.hasMaxBoundary());
        Assertions.assertTrue(full.isInfinite());
        Assertions.assertEquals(RegionLocation.INSIDE, full.classify(Double.NEGATIVE_INFINITY));
        Assertions.assertEquals(RegionLocation.INSIDE, full.classify(Double.POSITIVE_INFINITY));
    }

    /** Asserts contains() for both the double and Vector1D overloads on each point. */
    private static void checkContains(final Interval interval, final boolean contains, final double... points) {
        for (final double x : points) {
            final String msg = "Unexpected contains status for point " + x;

            Assertions.assertEquals(contains, interval.contains(x), msg);
            Assertions.assertEquals(contains, interval.contains(Vector1D.of(x)), msg);
        }
    }

    /** Asserts classify() for both overloads on each point of an interval. */
    private static void checkClassify(final Interval interval, final RegionLocation loc, final double...
points) { // tail of checkClassify(Interval, ...)
        for (final double x : points) {
            final String msg = "Unexpected location for point " + x;

            Assertions.assertEquals(loc, interval.classify(x), msg);
            Assertions.assertEquals(loc, interval.classify(Vector1D.of(x)), msg);
        }
    }

    /** Same classification check, but against a BSP tree instead of an interval. */
    private static void checkClassify(final RegionBSPTree1D tree, final RegionLocation loc, final double... points) {
        for (final double x : points) {
            final String msg = "Unexpected location for point " + x;

            Assertions.assertEquals(loc, tree.classify(x), msg);
            Assertions.assertEquals(loc, tree.classify(Vector1D.of(x)), msg);
        }
    }

    /** Asserts that projecting {@code location} onto the interval boundary lands at {@code projectedLocation}. */
    private static void checkBoundaryProjection(final Interval interval, final double location,
            final double projectedLocation) {
        final Vector1D pt = Vector1D.of(location);

        final Vector1D proj = interval.project(pt);

        Assertions.assertEquals(projectedLocation, proj.getX(), TEST_EPS);
    }

    /** Check that the given interval matches the arguments and is internally consistent.
     * @param interval interval under test
     * @param min expected min bound (may be -infinity)
     * @param max expected max bound (may be +infinity)
     */
    private static void checkInterval(final Interval interval, final double min, final double max) {

        Assertions.assertEquals(min, interval.getMin(), TEST_EPS);
        Assertions.assertEquals(max, interval.getMax(), TEST_EPS);

        final boolean finiteMin = Double.isFinite(min);
        final boolean finiteMax = Double.isFinite(max);

        // a boundary object exists exactly when the corresponding bound is finite
        Assertions.assertEquals(finiteMin, interval.hasMinBoundary());
        Assertions.assertEquals(finiteMax, interval.hasMaxBoundary());

        if (finiteMin) {
            Assertions.assertEquals(min, interval.getMinBoundary().getLocation(), TEST_EPS);
        } else {
            Assertions.assertNull(interval.getMinBoundary());
        }

        if (finiteMax) {
            Assertions.assertEquals(max, interval.getMaxBoundary().getLocation(), TEST_EPS);
        } else {
            Assertions.assertNull(interval.getMaxBoundary());
        }

        Assertions.assertFalse(interval.isEmpty()); // always false
    }
}
apache/nifi
35,044
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestExecuteSQLRecord.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.flowfile.attributes.FragmentAttributes; import org.apache.nifi.json.JsonRecordSetWriter; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processors.standard.sql.RecordSqlWriter; import org.apache.nifi.provenance.ProvenanceEventType; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.serialization.record.MockRecordWriter; import org.apache.nifi.util.MockComponentLog; import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.apache.nifi.util.db.JdbcCommon.AvroConversionOptions; import org.apache.nifi.util.db.SimpleCommerceDataSet; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.sql.Array;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/** Unit tests for the ExecuteSQLRecord processor, backed by an embedded Derby database. */
public class TestExecuteSQLRecord {

    // NOTE(review): instance (non-static) logger on a per-test-instance class; appears unused in
    // the visible portion — confirm before relying on it.
    private final Logger LOGGER = LoggerFactory.getLogger(TestExecuteSQLRecord.class);

    final static String DB_LOCATION = "target/db";

    // Multi-table join query with a fixed WHERE clause and no Expression Language references.
    final static String QUERY_WITHOUT_EL = "select "
            + "  PER.ID as PersonId, PER.NAME as PersonName, PER.CODE as PersonCode"
            + ", PRD.ID as ProductId,PRD.NAME as ProductName,PRD.CODE as ProductCode"
            + ", REL.ID as RelId,    REL.NAME as RelName,    REL.CODE as RelCode"
            + ", ROW_NUMBER() OVER () as rownr "
            + " from persons PER, products PRD, relationships REL"
            + " where PER.ID = 10";

    /** Redirect Derby's error stream away from stdout for the whole test class. */
    @BeforeAll
    public static void setupClass() {
        System.setProperty("derby.stream.error.file", "target/derby.log");
    }

    @AfterAll
    public static void cleanupClass() {
        System.clearProperty("derby.stream.error.file");
    }

    private TestRunner runner;

    /** Builds a fresh runner per test with a simple DBCP service registered as "dbcp". */
    @BeforeEach
    public void setup() throws InitializationException {
        final DBCPService dbcp = new DBCPServiceSimpleImpl();
        final Map<String, String> dbcpProperties = new HashMap<>();

        runner = TestRunners.newTestRunner(ExecuteSQLRecord.class);
        runner.addControllerService("dbcp", dbcp, dbcpProperties);
        runner.enableControllerService(dbcp);
        runner.setProperty(AbstractExecuteSQL.DBCP_SERVICE, "dbcp");
    }

    /** With an incoming connection but no FlowFile queued, the processor must emit nothing. */
    @Test
    public void testIncomingConnectionWithNoFlowFile() throws
InitializationException { // tail of testIncomingConnectionWithNoFlowFile
        runner.setIncomingConnection(true);
        runner.setProperty(AbstractExecuteSQL.SQL_QUERY, "SELECT * FROM persons");

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.run();
        // no input FlowFile -> no output on either relationship
        runner.assertTransferCount(AbstractExecuteSQL.REL_SUCCESS, 0);
        runner.assertTransferCount(AbstractExecuteSQL.REL_FAILURE, 0);
    }

    /** Same as above but without even a configured query: still nothing emitted. */
    @Test
    public void testIncomingConnectionWithNoFlowFileAndNoQuery() throws InitializationException {
        runner.setIncomingConnection(true);

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.run();
        runner.assertTransferCount(AbstractExecuteSQL.REL_SUCCESS, 0);
        runner.assertTransferCount(AbstractExecuteSQL.REL_FAILURE, 0);
    }

    /** No incoming connection and no query property is an invalid configuration. */
    @Test
    public void testNoIncomingConnectionAndNoQuery() {
        runner.setIncomingConnection(false);
        assertThrows(AssertionError.class, () -> runner.run());
    }

    /** Source-mode run (no input): provenance should record a RECEIVE event. */
    @Test
    public void testNoIncomingConnection() throws SQLException, InitializationException {
        runner.setIncomingConnection(false);
        invokeOnTriggerRecords(null, QUERY_WITHOUT_EL, false, null, true);
        assertEquals(ProvenanceEventType.RECEIVE, runner.getProvenanceEvents().getFirst().getEventType());
    }

    /** Query arriving via FlowFile content: provenance records FORK then FETCH. */
    @Test
    public void testSelectQueryInFlowFile() throws InitializationException, SQLException {
        invokeOnTriggerRecords(null, QUERY_WITHOUT_EL, true, null, false);
        assertEquals(ProvenanceEventType.FORK, runner.getProvenanceEvents().get(0).getEventType());
        assertEquals(ProvenanceEventType.FETCH, runner.getProvenanceEvents().get(1).getEventType());
    }

    /** The query must succeed with auto-commit explicitly disabled. */
    @Test
    public void testAutoCommitFalse() throws InitializationException, SQLException {
        runner.setProperty(ExecuteSQL.AUTO_COMMIT, "false");
        invokeOnTriggerRecords(null,
QUERY_WITHOUT_EL, true, null, false); // tail of testAutoCommitFalse
    }

    /** The query must succeed with auto-commit explicitly enabled. */
    @Test
    public void testAutoCommitTrue() throws InitializationException, SQLException {
        runner.setProperty(ExecuteSQL.AUTO_COMMIT, "true");
        invokeOnTriggerRecords(null, QUERY_WITHOUT_EL, true, null, false);
    }

    /** 1000 rows / 5 per FlowFile -> 200 output FlowFiles, each carrying fragment attributes. */
    @Test
    public void testWithOutputBatching() throws InitializationException, SQLException {
        // load test data to database
        final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
        Statement stmt = con.createStatement();

        try {
            stmt.execute("drop table TEST_NULL_INT");
        } catch (final SQLException ignored) {
            // table may not exist yet on the first run
        }

        stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");

        for (int i = 0; i < 1000; i++) {
            stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (" + i + ", 1, 1)");
        }

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.setIncomingConnection(false);
        runner.setProperty(ExecuteSQLRecord.MAX_ROWS_PER_FLOW_FILE, "5");
        runner.setProperty(ExecuteSQLRecord.OUTPUT_BATCH_SIZE, "5");
        runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "SELECT * FROM TEST_NULL_INT");
        runner.run();

        runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 200);
        runner.assertAllFlowFilesContainAttribute(ExecuteSQLRecord.REL_SUCCESS, FragmentAttributes.FRAGMENT_INDEX.key());
        runner.assertAllFlowFilesContainAttribute(ExecuteSQLRecord.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key());

        MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst();

        firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "5");
        // with output batching, the total fragment count is not known when early batches are emitted
        firstFlowFile.assertAttributeNotExists(FragmentAttributes.FRAGMENT_COUNT.key());
        firstFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "0");
firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULTSET_INDEX, "0"); // tail of testWithOutputBatching

        MockFlowFile lastFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).get(199);

        lastFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "5");
        lastFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "199");
        lastFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULTSET_INDEX, "0");
    }

    /** As testWithOutputBatching, but driven by an input FlowFile whose attributes must propagate. */
    @Test
    public void testWithOutputBatchingAndIncomingFlowFile() throws InitializationException, SQLException {
        // load test data to database
        final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
        Statement stmt = con.createStatement();

        try {
            stmt.execute("drop table TEST_NULL_INT");
        } catch (final SQLException ignored) {
            // table may not exist yet on the first run
        }

        stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");

        for (int i = 0; i < 1000; i++) {
            stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (" + i + ", 1, 1)");
        }

        Map<String, String> attrMap = new HashMap<>();
        String testAttrName = "attr1";
        String testAttrValue = "value1";
        attrMap.put(testAttrName, testAttrValue);

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.setIncomingConnection(true);
        runner.setProperty(ExecuteSQLRecord.MAX_ROWS_PER_FLOW_FILE, "5");
        runner.setProperty(ExecuteSQLRecord.OUTPUT_BATCH_SIZE, "1");
        MockFlowFile inputFlowFile = runner.enqueue("SELECT * FROM TEST_NULL_INT", attrMap);
        runner.run();

        runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 200);
        runner.assertAllFlowFilesContainAttribute(ExecuteSQLRecord.REL_SUCCESS, FragmentAttributes.FRAGMENT_INDEX.key());
        runner.assertAllFlowFilesContainAttribute(ExecuteSQLRecord.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key());

        MockFlowFile
firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst(); // tail of testWithOutputBatchingAndIncomingFlowFile

        firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "5");
        firstFlowFile.assertAttributeNotExists(FragmentAttributes.FRAGMENT_COUNT.key());
        firstFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "0");
        firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULTSET_INDEX, "0");

        MockFlowFile lastFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).get(199);

        lastFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "5");
        lastFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "199");
        lastFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULTSET_INDEX, "0");
        // input FlowFile attributes and UUID must be carried onto the outputs
        lastFlowFile.assertAttributeEquals(testAttrName, testAttrValue);
        lastFlowFile.assertAttributeEquals(AbstractExecuteSQL.INPUT_FLOWFILE_UUID, inputFlowFile.getAttribute(CoreAttributes.UUID.key()));
    }

    /** A conversion error in a late batch must route the input to failure, not success. */
    @Test
    public void testWithOutputBatchingLastBatchFails() throws InitializationException, SQLException {
        // load test data to database
        final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
        Statement stmt = con.createStatement();

        try {
            stmt.execute("drop table TEST_NULL_INT");
        } catch (final SQLException ignored) {
            // table may not exist yet on the first run
        }

        stmt.execute("create table TEST_NULL_INT (id integer not null, val1 varchar(50), constraint my_pk primary key (id))");

        // Insert some valid numeric values (for TO_NUMBER call later)
        // NOTE(review): the comment mentions TO_NUMBER, but the query below uses CAST — confirm intent.
        for (int i = 0; i < 11; i++) {
            stmt.execute("insert into TEST_NULL_INT (id, val1) VALUES (" + i + ", '" + i + "')");
        }
        // Insert invalid numeric value
        stmt.execute("insert into TEST_NULL_INT (id, val1) VALUES (100, 'abc')");

        Map<String, String> attrMap = new HashMap<>();
        String testAttrName = "attr1";
        String testAttrValue = "value1";
        attrMap.put(testAttrName, testAttrValue);

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); // tail of testWithOutputBatchingLastBatchFails
        runner.enableControllerService(recordWriter);

        runner.setIncomingConnection(true);
        runner.setProperty(ExecuteSQLRecord.MAX_ROWS_PER_FLOW_FILE, "5");
        // the 'abc' row makes the final CAST fail mid-stream
        runner.enqueue("SELECT ID, CAST(VAL1 AS INTEGER) AS TN FROM TEST_NULL_INT", attrMap);
        runner.run();

        runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_FAILURE, 1);
        runner.assertTransferCount(ExecuteSQLRecord.REL_SUCCESS, 0);
    }

    /** With batching disabled (batch size 0), fragment.count IS present on every output. */
    @Test
    public void testMaxRowsPerFlowFile() throws Exception {
        // load test data to database
        final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
        Statement stmt = con.createStatement();

        try {
            stmt.execute("drop table TEST_NULL_INT");
        } catch (final SQLException ignored) {
            // table may not exist yet on the first run
        }

        stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");

        for (int i = 0; i < 1000; i++) {
            stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (" + i + ", 1, 1)");
        }

        runner.setIncomingConnection(false);
        runner.setProperty(AbstractExecuteSQL.MAX_ROWS_PER_FLOW_FILE, "5");
        runner.setProperty(AbstractExecuteSQL.OUTPUT_BATCH_SIZE, "0");
        runner.setProperty(AbstractExecuteSQL.SQL_QUERY, "SELECT * FROM TEST_NULL_INT");

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.run();

        runner.assertAllFlowFilesTransferred(AbstractExecuteSQL.REL_SUCCESS, 200);
        runner.assertTransferCount(AbstractExecuteSQL.REL_FAILURE, 0);
        runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_INDEX.key());
        runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_ID.key());
        runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, FragmentAttributes.FRAGMENT_COUNT.key());
MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(AbstractExecuteSQL.REL_SUCCESS).getFirst(); // tail of testMaxRowsPerFlowFile

        firstFlowFile.assertAttributeEquals(AbstractExecuteSQL.RESULT_ROW_COUNT, "5");
        firstFlowFile.assertAttributeEquals("record.count", "5");
        firstFlowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "text/plain"); // MockRecordWriter has text/plain MIME type
        firstFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "0");
        firstFlowFile.assertAttributeEquals(AbstractExecuteSQL.RESULTSET_INDEX, "0");

        MockFlowFile lastFlowFile = runner.getFlowFilesForRelationship(AbstractExecuteSQL.REL_SUCCESS).get(199);

        lastFlowFile.assertAttributeEquals(AbstractExecuteSQL.RESULT_ROW_COUNT, "5");
        lastFlowFile.assertAttributeEquals("record.count", "5");
        lastFlowFile.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "text/plain"); // MockRecordWriter has text/plain MIME type
        lastFlowFile.assertAttributeEquals(FragmentAttributes.FRAGMENT_INDEX.key(), "199");
        lastFlowFile.assertAttributeEquals(AbstractExecuteSQL.RESULTSET_INDEX, "0");
    }

    /** A DML statement returns no result set; a single FlowFile with row count 0 is still emitted. */
    @Test
    public void testInsertStatementCreatesFlowFile() throws Exception {
        // load test data to database
        final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
        Statement stmt = con.createStatement();

        try {
            stmt.execute("drop table TEST_NULL_INT");
        } catch (final SQLException ignored) {
            // table may not exist yet on the first run
        }

        stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))");

        runner.setIncomingConnection(false);
        runner.setProperty(AbstractExecuteSQL.SQL_QUERY, "insert into TEST_NULL_INT (id, val1, val2) VALUES (0, NULL, 1)");

        MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1);
        runner.addControllerService("writer", recordWriter);
        runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer");
        runner.enableControllerService(recordWriter);

        runner.run();

        runner.assertAllFlowFilesTransferred(AbstractExecuteSQL.REL_SUCCESS, 1);
runner.getFlowFilesForRelationship(AbstractExecuteSQL.REL_SUCCESS).getFirst().assertAttributeEquals(AbstractExecuteSQL.RESULT_ROW_COUNT, "0"); } @Test public void testNoRowsStatementCreatesEmptyFlowFile() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); runner.setIncomingConnection(true); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "select * from TEST_NULL_INT"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.enqueue("Hello".getBytes()); runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 1); MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "0"); firstFlowFile.assertContentEquals(""); } @Test public void testNoResultCreatesEmptyFlowFile() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); runner.setIncomingConnection(true); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "drop table TEST_NULL_INT"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); 
runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.enqueue("Hello".getBytes()); runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 1); MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "0"); firstFlowFile.assertContentEquals(""); } @Test public void testWithSqlException() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NO_ROWS"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NO_ROWS (id integer)"); runner.setIncomingConnection(false); // Try a valid SQL statement that will generate an error (val1 does not exist, e.g.) runner.setProperty(AbstractExecuteSQL.SQL_QUERY, "SELECT val1 FROM TEST_NO_ROWS"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.run(); //No incoming flow file containing a query, and an exception causes no outbound flowfile. 
// There should be no flow files on either relationship runner.assertAllFlowFilesTransferred(AbstractExecuteSQL.REL_FAILURE, 0); runner.assertAllFlowFilesTransferred(AbstractExecuteSQL.REL_SUCCESS, 0); } public void invokeOnTriggerRecords(final Integer queryTimeout, final String query, final boolean incomingFlowFile, final Map<String, String> attrs, final boolean setQueryProperty) throws InitializationException, SQLException { if (queryTimeout != null) { runner.setProperty(AbstractExecuteSQL.QUERY_TIMEOUT, queryTimeout + " secs"); } // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); SimpleCommerceDataSet.loadTestData2Database(con, 100, 200, 100); LOGGER.info("test data loaded"); //commit loaded data if auto-commit is dissabled if (!con.getAutoCommit()) { con.commit(); } // ResultSet size will be 1x200x100 = 20 000 rows // because of where PER.ID = ${person.id} MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); if (incomingFlowFile) { // incoming FlowFile content is not used, but attributes are used final Map<String, String> attributes = (attrs == null) ? 
new HashMap<>() : attrs; attributes.put("person.id", "10"); if (!setQueryProperty) { runner.enqueue(query.getBytes(), attributes); } else { runner.enqueue("Hello".getBytes(), attributes); } } if (setQueryProperty) { runner.setProperty(AbstractExecuteSQL.SQL_QUERY, query); } runner.run(); runner.assertAllFlowFilesTransferred(AbstractExecuteSQL.REL_SUCCESS, 1); runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, AbstractExecuteSQL.RESULT_QUERY_DURATION); runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, AbstractExecuteSQL.RESULT_QUERY_EXECUTION_TIME); runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, AbstractExecuteSQL.RESULT_QUERY_FETCH_TIME); runner.assertAllFlowFilesContainAttribute(AbstractExecuteSQL.REL_SUCCESS, AbstractExecuteSQL.RESULT_ROW_COUNT); final List<MockFlowFile> flowfiles = runner.getFlowFilesForRelationship(AbstractExecuteSQL.REL_SUCCESS); final long executionTime = Long.parseLong(flowfiles.getFirst().getAttribute(AbstractExecuteSQL.RESULT_QUERY_EXECUTION_TIME)); final long fetchTime = Long.parseLong(flowfiles.getFirst().getAttribute(AbstractExecuteSQL.RESULT_QUERY_FETCH_TIME)); final long durationTime = Long.parseLong(flowfiles.getFirst().getAttribute(AbstractExecuteSQL.RESULT_QUERY_DURATION)); assertEquals(durationTime, fetchTime + executionTime); } @SuppressWarnings("unchecked") @Test public void testWithSqlExceptionErrorProcessingResultSet() throws Exception { DBCPService dbcp = mock(DBCPService.class); Connection conn = mock(Connection.class); when(dbcp.getConnection(any(Map.class))).thenReturn(conn); when(dbcp.getIdentifier()).thenReturn("mockdbcp"); PreparedStatement statement = mock(PreparedStatement.class); when(conn.prepareStatement(anyString())).thenReturn(statement); when(statement.execute()).thenReturn(true); ResultSet rs = mock(ResultSet.class); when(statement.getResultSet()).thenReturn(rs); // Throw an exception the first time you access the ResultSet, this is after 
the flow file to hold the results has been created. when(rs.getMetaData()).thenThrow(new SQLException("test execute statement failed")); runner.addControllerService("mockdbcp", dbcp, new HashMap<>()); runner.enableControllerService(dbcp); runner.setProperty(AbstractExecuteSQL.DBCP_SERVICE, "mockdbcp"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.setIncomingConnection(true); runner.enqueue("SELECT 1"); runner.run(); runner.assertTransferCount(AbstractExecuteSQL.REL_FAILURE, 1); runner.assertTransferCount(AbstractExecuteSQL.REL_SUCCESS, 0); // Assert exception message has been put to flow file attribute MockFlowFile failedFlowFile = runner.getFlowFilesForRelationship(AbstractExecuteSQL.REL_FAILURE).getFirst(); assertEquals("java.sql.SQLException: test execute statement failed", failedFlowFile.getAttribute(AbstractExecuteSQL.RESULT_ERROR_MESSAGE)); } @Test public void testPreQuery() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); stmt.execute("insert into TEST_NULL_INT values(1,2,3)"); runner.setIncomingConnection(true); runner.setProperty(ExecuteSQLRecord.SQL_PRE_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(1);CALL SYSCS_UTIL.SYSCS_SET_STATISTICS_TIMING(1)"); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "select * from TEST_NULL_INT"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, 
"writer"); runner.enableControllerService(recordWriter); runner.enqueue("test".getBytes()); runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 1); MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "1"); } @Test public void testPostQuery() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); stmt.execute("insert into TEST_NULL_INT values(1,2,3)"); runner.setIncomingConnection(true); runner.setProperty(ExecuteSQLRecord.SQL_PRE_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(1);CALL SYSCS_UTIL.SYSCS_SET_STATISTICS_TIMING(1)"); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "select * from TEST_NULL_INT"); runner.setProperty(ExecuteSQLRecord.SQL_POST_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(0);CALL SYSCS_UTIL.SYSCS_SET_STATISTICS_TIMING(0)"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.enqueue("test".getBytes()); runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_SUCCESS, 1); MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_SUCCESS).getFirst(); firstFlowFile.assertAttributeEquals(ExecuteSQLRecord.RESULT_ROW_COUNT, "1"); } @Test public void testPreQueryFail() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement 
stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); runner.setIncomingConnection(true); // Simulate failure by not provide parameter runner.setProperty(ExecuteSQLRecord.SQL_PRE_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS()"); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "select * from TEST_NULL_INT"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.enqueue("test".getBytes()); runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_FAILURE, 1); } @Test public void testPostQueryFail() throws Exception { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); Statement stmt = con.createStatement(); try { stmt.execute("drop table TEST_NULL_INT"); } catch (final SQLException ignored) { } stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); runner.setIncomingConnection(true); runner.setProperty(ExecuteSQLRecord.SQL_PRE_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(1);CALL SYSCS_UTIL.SYSCS_SET_STATISTICS_TIMING(1)"); runner.setProperty(ExecuteSQLRecord.SQL_QUERY, "select * from TEST_NULL_INT"); // Simulate failure by not provide parameter runner.setProperty(ExecuteSQLRecord.SQL_POST_QUERY, "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS()"); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); runner.setProperty(ExecuteSQLRecord.RECORD_WRITER_FACTORY, "writer"); runner.enableControllerService(recordWriter); runner.enqueue("test".getBytes()); 
runner.run(); runner.assertAllFlowFilesTransferred(ExecuteSQLRecord.REL_FAILURE, 1); MockFlowFile firstFlowFile = runner.getFlowFilesForRelationship(ExecuteSQLRecord.REL_FAILURE).getFirst(); firstFlowFile.assertContentEquals("test"); } @Test public void testArrayOfStringsInference() throws Exception { final ResultSetMetaData meta = mock(ResultSetMetaData.class); when(meta.getColumnCount()).thenReturn(1); when(meta.getColumnLabel(1)).thenReturn("test"); when(meta.getColumnName(1)).thenReturn("test"); when(meta.getColumnType(1)).thenReturn(Types.ARRAY); when(meta.getTableName(1)).thenReturn(""); final ResultSet rs = mock(ResultSet.class); when(rs.getMetaData()).thenReturn(meta); when(rs.next()).thenReturn(true, false); final Array array = mock(Array.class); when(array.getArray()).thenReturn(new String[] {"test"}); when(rs.getArray(1)).thenReturn(array); when(rs.getObject(1)).thenReturn(array); when(rs.getObject("test")).thenReturn(array); final TestRunner localRunner = TestRunners.newTestRunner(ExecuteSQLRecord.class); final RecordSetWriterFactory writerFactory = new JsonRecordSetWriter(); localRunner.addControllerService("writer", writerFactory); localRunner.enableControllerService(writerFactory); final RecordSqlWriter sqlWriter = new RecordSqlWriter(writerFactory, AvroConversionOptions.builder().useLogicalTypes(false).build(), 0, Map.of()); final ByteArrayOutputStream out = new ByteArrayOutputStream(); final ComponentLog log = new MockComponentLog("test", sqlWriter); sqlWriter.writeResultSet(rs, out, log, null); final String json = out.toString(); assertTrue(json.contains("\"test\":[\"test\"]"), "Expected JSON to contain array of strings: " + json); } /** * Simple implementation only for ExecuteSQL processor testing. 
*/ static class DBCPServiceSimpleImpl extends AbstractControllerService implements DBCPService { @Override public String getIdentifier() { return "dbcp"; } @Override public Connection getConnection() throws ProcessException { try { Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); return DriverManager.getConnection("jdbc:derby:" + DB_LOCATION + ";create=true"); } catch (final Exception e) { throw new ProcessException("getConnection failed", e); } } } }
googleapis/google-cloud-java
35,029
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/DeployResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/model_garden_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [ModelGardenService.Deploy][google.cloud.aiplatform.v1beta1.ModelGardenService.Deploy]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.DeployResponse} */ public final class DeployResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.DeployResponse) DeployResponseOrBuilder { private static final long serialVersionUID = 0L; // Use DeployResponse.newBuilder() to construct. 
private DeployResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeployResponse() { publisherModel_ = ""; endpoint_ = ""; model_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeployResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto .internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto .internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.DeployResponse.class, com.google.cloud.aiplatform.v1beta1.DeployResponse.Builder.class); } public static final int PUBLISHER_MODEL_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object publisherModel_ = ""; /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The publisherModel. */ @java.lang.Override public java.lang.String getPublisherModel() { java.lang.Object ref = publisherModel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publisherModel_ = s; return s; } } /** * * * <pre> * Output only. The name of the PublisherModel resource. 
* Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for publisherModel. */ @java.lang.Override public com.google.protobuf.ByteString getPublisherModelBytes() { java.lang.Object ref = publisherModel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publisherModel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENDPOINT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object endpoint_ = ""; /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The endpoint. */ @java.lang.Override public java.lang.String getEndpoint() { java.lang.Object ref = endpoint_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); endpoint_ = s; return s; } } /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for endpoint. 
*/ @java.lang.Override public com.google.protobuf.ByteString getEndpointBytes() { java.lang.Object ref = endpoint_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); endpoint_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int MODEL_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object model_ = ""; /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The model. */ @java.lang.Override public java.lang.String getModel() { java.lang.Object ref = model_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); model_ = s; return s; } } /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for model. 
*/ @java.lang.Override public com.google.protobuf.ByteString getModelBytes() { java.lang.Object ref = model_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); model_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publisherModel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, publisherModel_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endpoint_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, endpoint_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publisherModel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, publisherModel_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endpoint_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, endpoint_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.aiplatform.v1beta1.DeployResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.DeployResponse other = (com.google.cloud.aiplatform.v1beta1.DeployResponse) obj; if (!getPublisherModel().equals(other.getPublisherModel())) return false; if (!getEndpoint().equals(other.getEndpoint())) return false; if (!getModel().equals(other.getModel())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PUBLISHER_MODEL_FIELD_NUMBER; hash = (53 * hash) + getPublisherModel().hashCode(); hash = (37 * hash) + ENDPOINT_FIELD_NUMBER; hash = (53 * hash) + getEndpoint().hashCode(); hash = (37 * hash) + MODEL_FIELD_NUMBER; hash = (53 * hash) + getModel().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.DeployResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [ModelGardenService.Deploy][google.cloud.aiplatform.v1beta1.ModelGardenService.Deploy]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.DeployResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.DeployResponse) com.google.cloud.aiplatform.v1beta1.DeployResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto .internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto .internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.DeployResponse.class, com.google.cloud.aiplatform.v1beta1.DeployResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.DeployResponse.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; publisherModel_ = ""; endpoint_ = ""; model_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto .internal_static_google_cloud_aiplatform_v1beta1_DeployResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.DeployResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.DeployResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.DeployResponse build() { com.google.cloud.aiplatform.v1beta1.DeployResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.DeployResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.DeployResponse result = new com.google.cloud.aiplatform.v1beta1.DeployResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.DeployResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.publisherModel_ = publisherModel_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.endpoint_ = endpoint_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.model_ = model_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.DeployResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.DeployResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.DeployResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.DeployResponse.getDefaultInstance()) return this; if (!other.getPublisherModel().isEmpty()) { publisherModel_ = other.publisherModel_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getEndpoint().isEmpty()) { endpoint_ = other.endpoint_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getModel().isEmpty()) { model_ = other.model_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { publisherModel_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { endpoint_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; 
break; } // case 18 case 26: { model_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object publisherModel_ = ""; /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The publisherModel. */ public java.lang.String getPublisherModel() { java.lang.Object ref = publisherModel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publisherModel_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for publisherModel. 
*/ public com.google.protobuf.ByteString getPublisherModelBytes() { java.lang.Object ref = publisherModel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publisherModel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The publisherModel to set. * @return This builder for chaining. */ public Builder setPublisherModel(java.lang.String value) { if (value == null) { throw new NullPointerException(); } publisherModel_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearPublisherModel() { publisherModel_ = getDefaultInstance().getPublisherModel(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. The name of the PublisherModel resource. * Format: * `publishers/{publisher}/models/{publisher_model}&#64;{version_id}`, or * `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}&#64;001` * </pre> * * <code> * string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... 
} * </code> * * @param value The bytes for publisherModel to set. * @return This builder for chaining. */ public Builder setPublisherModelBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); publisherModel_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object endpoint_ = ""; /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The endpoint. */ public java.lang.String getEndpoint() { java.lang.Object ref = endpoint_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); endpoint_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for endpoint. */ public com.google.protobuf.ByteString getEndpointBytes() { java.lang.Object ref = endpoint_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); endpoint_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The endpoint to set. * @return This builder for chaining. 
*/ public Builder setEndpoint(java.lang.String value) { if (value == null) { throw new NullPointerException(); } endpoint_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearEndpoint() { endpoint_ = getDefaultInstance().getEndpoint(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Output only. The name of the Endpoint created. * Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` * </pre> * * <code> * string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for endpoint to set. * @return This builder for chaining. */ public Builder setEndpointBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); endpoint_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object model_ = ""; /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The model. */ public java.lang.String getModel() { java.lang.Object ref = model_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); model_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the Model created. 
* Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for model. */ public com.google.protobuf.ByteString getModelBytes() { java.lang.Object ref = model_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); model_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The model to set. * @return This builder for chaining. */ public Builder setModel(java.lang.String value) { if (value == null) { throw new NullPointerException(); } model_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearModel() { model_ = getDefaultInstance().getModel(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. The name of the Model created. * Format: `projects/{project}/locations/{location}/models/{model}` * </pre> * * <code> * string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for model to set. * @return This builder for chaining. 
*/ public Builder setModelBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); model_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.DeployResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.DeployResponse) private static final com.google.cloud.aiplatform.v1beta1.DeployResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.DeployResponse(); } public static com.google.cloud.aiplatform.v1beta1.DeployResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeployResponse> PARSER = new com.google.protobuf.AbstractParser<DeployResponse>() { @java.lang.Override public DeployResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeployResponse> parser() { 
return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeployResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.DeployResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
34,988
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/ListRoomsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: schema/google/showcase/v1beta1/messaging.proto // Protobuf Java Version: 3.25.8 package com.google.showcase.v1beta1; /** * * * <pre> * The response message for the google.showcase.v1beta1.Messaging&#92;ListRooms * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.ListRoomsResponse} */ public final class ListRoomsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.ListRoomsResponse) ListRoomsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRoomsResponse.newBuilder() to construct. 
private ListRoomsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRoomsResponse() { rooms_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRoomsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_ListRoomsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_ListRoomsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.ListRoomsResponse.class, com.google.showcase.v1beta1.ListRoomsResponse.Builder.class); } public static final int ROOMS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.showcase.v1beta1.Room> rooms_; /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ @java.lang.Override public java.util.List<com.google.showcase.v1beta1.Room> getRoomsList() { return rooms_; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.showcase.v1beta1.RoomOrBuilder> getRoomsOrBuilderList() { return rooms_; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ @java.lang.Override public int getRoomsCount() { return rooms_.size(); } /** * * * <pre> * The list of rooms. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.Room getRooms(int index) { return rooms_.get(index); } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.RoomOrBuilder getRoomsOrBuilder(int index) { return rooms_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < rooms_.size(); i++) { output.writeMessage(1, rooms_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < rooms_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, rooms_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.showcase.v1beta1.ListRoomsResponse)) { return super.equals(obj); } com.google.showcase.v1beta1.ListRoomsResponse other = (com.google.showcase.v1beta1.ListRoomsResponse) obj; if (!getRoomsList().equals(other.getRoomsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRoomsCount() > 0) { hash = (37 * hash) + ROOMS_FIELD_NUMBER; hash = (53 * hash) + getRoomsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.ListRoomsResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.ListRoomsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListRoomsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.ListRoomsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.showcase.v1beta1.ListRoomsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the google.showcase.v1beta1.Messaging&#92;ListRooms * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.ListRoomsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.ListRoomsResponse) com.google.showcase.v1beta1.ListRoomsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_ListRoomsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_ListRoomsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.ListRoomsResponse.class, com.google.showcase.v1beta1.ListRoomsResponse.Builder.class); } // Construct using com.google.showcase.v1beta1.ListRoomsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (roomsBuilder_ == null) { rooms_ = java.util.Collections.emptyList(); } else { rooms_ = null; roomsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_ListRoomsResponse_descriptor; } @java.lang.Override 
public com.google.showcase.v1beta1.ListRoomsResponse getDefaultInstanceForType() { return com.google.showcase.v1beta1.ListRoomsResponse.getDefaultInstance(); } @java.lang.Override public com.google.showcase.v1beta1.ListRoomsResponse build() { com.google.showcase.v1beta1.ListRoomsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.showcase.v1beta1.ListRoomsResponse buildPartial() { com.google.showcase.v1beta1.ListRoomsResponse result = new com.google.showcase.v1beta1.ListRoomsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.showcase.v1beta1.ListRoomsResponse result) { if (roomsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { rooms_ = java.util.Collections.unmodifiableList(rooms_); bitField0_ = (bitField0_ & ~0x00000001); } result.rooms_ = rooms_; } else { result.rooms_ = roomsBuilder_.build(); } } private void buildPartial0(com.google.showcase.v1beta1.ListRoomsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override 
public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.showcase.v1beta1.ListRoomsResponse) { return mergeFrom((com.google.showcase.v1beta1.ListRoomsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.showcase.v1beta1.ListRoomsResponse other) { if (other == com.google.showcase.v1beta1.ListRoomsResponse.getDefaultInstance()) return this; if (roomsBuilder_ == null) { if (!other.rooms_.isEmpty()) { if (rooms_.isEmpty()) { rooms_ = other.rooms_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRoomsIsMutable(); rooms_.addAll(other.rooms_); } onChanged(); } } else { if (!other.rooms_.isEmpty()) { if (roomsBuilder_.isEmpty()) { roomsBuilder_.dispose(); roomsBuilder_ = null; rooms_ = other.rooms_; bitField0_ = (bitField0_ & ~0x00000001); roomsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRoomsFieldBuilder() : null; } else { roomsBuilder_.addAllMessages(other.rooms_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.showcase.v1beta1.Room m = input.readMessage(com.google.showcase.v1beta1.Room.parser(), extensionRegistry); if (roomsBuilder_ == null) { ensureRoomsIsMutable(); rooms_.add(m); } else { roomsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.showcase.v1beta1.Room> rooms_ = java.util.Collections.emptyList(); private void ensureRoomsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { rooms_ = new java.util.ArrayList<com.google.showcase.v1beta1.Room>(rooms_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Room, com.google.showcase.v1beta1.Room.Builder, com.google.showcase.v1beta1.RoomOrBuilder> roomsBuilder_; /** * * * <pre> * The list of rooms. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.Room> getRoomsList() { if (roomsBuilder_ == null) { return java.util.Collections.unmodifiableList(rooms_); } else { return roomsBuilder_.getMessageList(); } } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public int getRoomsCount() { if (roomsBuilder_ == null) { return rooms_.size(); } else { return roomsBuilder_.getCount(); } } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public com.google.showcase.v1beta1.Room getRooms(int index) { if (roomsBuilder_ == null) { return rooms_.get(index); } else { return roomsBuilder_.getMessage(index); } } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder setRooms(int index, com.google.showcase.v1beta1.Room value) { if (roomsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRoomsIsMutable(); rooms_.set(index, value); onChanged(); } else { roomsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder setRooms(int index, com.google.showcase.v1beta1.Room.Builder builderForValue) { if (roomsBuilder_ == null) { ensureRoomsIsMutable(); rooms_.set(index, builderForValue.build()); onChanged(); } else { roomsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of rooms. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder addRooms(com.google.showcase.v1beta1.Room value) { if (roomsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRoomsIsMutable(); rooms_.add(value); onChanged(); } else { roomsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder addRooms(int index, com.google.showcase.v1beta1.Room value) { if (roomsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRoomsIsMutable(); rooms_.add(index, value); onChanged(); } else { roomsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder addRooms(com.google.showcase.v1beta1.Room.Builder builderForValue) { if (roomsBuilder_ == null) { ensureRoomsIsMutable(); rooms_.add(builderForValue.build()); onChanged(); } else { roomsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder addRooms(int index, com.google.showcase.v1beta1.Room.Builder builderForValue) { if (roomsBuilder_ == null) { ensureRoomsIsMutable(); rooms_.add(index, builderForValue.build()); onChanged(); } else { roomsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder addAllRooms( java.lang.Iterable<? extends com.google.showcase.v1beta1.Room> values) { if (roomsBuilder_ == null) { ensureRoomsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, rooms_); onChanged(); } else { roomsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of rooms. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder clearRooms() { if (roomsBuilder_ == null) { rooms_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { roomsBuilder_.clear(); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public Builder removeRooms(int index) { if (roomsBuilder_ == null) { ensureRoomsIsMutable(); rooms_.remove(index); onChanged(); } else { roomsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public com.google.showcase.v1beta1.Room.Builder getRoomsBuilder(int index) { return getRoomsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public com.google.showcase.v1beta1.RoomOrBuilder getRoomsOrBuilder(int index) { if (roomsBuilder_ == null) { return rooms_.get(index); } else { return roomsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public java.util.List<? extends com.google.showcase.v1beta1.RoomOrBuilder> getRoomsOrBuilderList() { if (roomsBuilder_ != null) { return roomsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(rooms_); } } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public com.google.showcase.v1beta1.Room.Builder addRoomsBuilder() { return getRoomsFieldBuilder() .addBuilder(com.google.showcase.v1beta1.Room.getDefaultInstance()); } /** * * * <pre> * The list of rooms. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public com.google.showcase.v1beta1.Room.Builder addRoomsBuilder(int index) { return getRoomsFieldBuilder() .addBuilder(index, com.google.showcase.v1beta1.Room.getDefaultInstance()); } /** * * * <pre> * The list of rooms. * </pre> * * <code>repeated .google.showcase.v1beta1.Room rooms = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.Room.Builder> getRoomsBuilderList() { return getRoomsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Room, com.google.showcase.v1beta1.Room.Builder, com.google.showcase.v1beta1.RoomOrBuilder> getRoomsFieldBuilder() { if (roomsBuilder_ == null) { roomsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Room, com.google.showcase.v1beta1.Room.Builder, com.google.showcase.v1beta1.RoomOrBuilder>( rooms_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); rooms_ = null; } return roomsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in ListRoomsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Messaging&#92;ListRooms` method to retrieve * the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.ListRoomsResponse) } // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.ListRoomsResponse) private static final com.google.showcase.v1beta1.ListRoomsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.showcase.v1beta1.ListRoomsResponse(); } public static com.google.showcase.v1beta1.ListRoomsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRoomsResponse> PARSER = new com.google.protobuf.AbstractParser<ListRoomsResponse>() { @java.lang.Override public ListRoomsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRoomsResponse> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRoomsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.showcase.v1beta1.ListRoomsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/nosql
34,962
kvmain/src/main/java/oracle/kv/xregion/XRService.java
/*- * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package oracle.kv.xregion; import static java.lang.Long.parseLong; import static java.util.concurrent.TimeUnit.SECONDS; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; import java.io.RandomAccessFile; import java.lang.management.ManagementFactory; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; import oracle.kv.impl.util.CommandParser; import oracle.kv.impl.util.FormatUtils; import oracle.kv.impl.util.KVStoreMain; import oracle.kv.impl.util.RateLimitingLogger; import oracle.kv.impl.util.client.ClientLoggerUtils; import oracle.kv.impl.xregion.service.JsonConfig; import oracle.kv.impl.xregion.service.ServiceMDMan; import oracle.kv.impl.xregion.service.XRegionService; import oracle.nosql.common.contextlogger.LogFormatter; import com.sleepycat.je.log.FileManager; /** * Object represents the entry point of cross-region service agent. 
*/ public class XRService { /** * max number of logging files */ private static final int LOG_FILE_LIMIT_COUNTS = Integer.getInteger("oracle.kv.xregion.logfile.count", 20); /** * size limit of each logging file in bytes */ private static final int LOG_FILE_LIMIT_BYTES = Integer.getInteger("oracle.kv.xregion.logfile.limit", 100 * 1024 * 1024); /** * failure injection for test to kill process */ public static final String KILL_PROCESS_VAR_TEST = "oracle.kv.xregion.test.killProcessTest"; private static final boolean KILL_PROCESS_TEST = Boolean.getBoolean(KILL_PROCESS_VAR_TEST); /** * failure injection for test to delete status file */ public static final String REMOVE_STATUS_VAR_TEST = "oracle.kv.xregion.test.removeStatusTest"; private static final boolean REMOVE_STATUS_TEST = Boolean.getBoolean(REMOVE_STATUS_VAR_TEST); /** * Test only, wait time in seconds in killing process */ private static final int TEST_WAIT_KILL_SECS = 10; /* retry interval in ms if host region is not reachable */ private final static int UNREACHABLE_HOST_REGION_RETRY_MS = 6 * 1000; /* Time out value of 30000 millisecond */ private static final long TIMEOUT_MS = 30 * 1000; /* Interval value of 1000 millisecond */ private static final long INTV_MS = 1000; /* Wait value for stopping agent forcefully */ private static final long WAIT_TIME_MS = 10; private final static String DEFAULT_LOG_DIR = "log"; private final static String LOG_FILE_SUFFIX = ".log"; private final static String LOG_FILE_NAME_SPLITTER = "."; private final static String LOCK_FILE_SUFFIX = ".lck"; private final static String PID_FILE_SUFFIX = ".pid"; /* External commands, for "java -jar" usage. 
*/ public static final String START_COMMAND_NAME = "xrstart"; public static final String START_COMMAND_DESC = "start cross-region " + "(xregion) service"; public static final String STATUS_COMMAND_NAME = "xrstatus"; public static final String STATUS_COMMAND_DESC = "status of cross-region" + "(xregion) service"; public static final String STOP_COMMAND_NAME = "xrstop"; public static final String STOP_COMMAND_DESC = "stop cross-region " + "(xregion) service"; private static final String STOP_FORCE_FLAG = "-force"; private static final String CONFIG_FLAG = "-config"; private static final String START_BG_FLAG = "-bg"; public static final String START_COMMAND_ARGS = CommandParser.optional(CONFIG_FLAG + " <JSON config file>") + " " + CommandParser.optional(START_BG_FLAG); public static final String STATUS_COMMAND_ARGS = CommandParser.optional(CONFIG_FLAG + " <JSON config file>"); public static final String STOP_COMMAND_ARGS = CommandParser.optional(CONFIG_FLAG + " <JSON config file>") + " " + CommandParser.optional(STOP_FORCE_FLAG); /* rate limiting log period in ms */ private static final int RL_LOG_PERIOD_MS = 10 * 1000; /* list of arguments */ private final String[] args; /* status file name base and extension */ private static String STATUS_FILE_NAME_BASE = "status"; private static String STATUS_FILE_NAME_EXT = "txt"; /* JSON configuration file for bootstrap */ private String json; /* * false if stop the agent after pending requests are done and * checkpoint of each stream is done, true if immediately stop the agent, * the default is false */ private boolean force = false; /* * true if the service is running in background, true otherwise */ private boolean background = false; /* lock file manager */ private final LockFileManager lockMan; /* json config */ private final JsonConfig conf; /* command */ private String command; /* logger */ private Logger logger; /* rate limiting logger */ private final RateLimitingLogger<String> rlLogger; /* status file name */ private 
final String statusFileName; /** * Parsing status of agent: * Success: agent successfully started * Failure: agent failed to start * Duplicate: another agent already running * Crashed: agent started but eventually stopped working */ public enum StartParseStatus { SUCCESS, FAILURE, DUPLICATE, CRASHED; } interface ExitCode { int getCode(); String getMsg(); default boolean isSuccessful() { return false; } } /** * Exit code for command starting agent in bg: * Success: agent successfully started * Failed: agent failed to start * Duplicate: another agent already running * Crashed: agent start but eventually stopped working * Timeout: no status file found even though agent started */ public enum StartExitCode implements ExitCode { SUCCESS(0, "Started successfully") { @Override public boolean isSuccessful() { return true; } }, FAILED(1, "Failed to start"), TIMEOUT(2, "Failed with timeout"), CRASHED(3, "Crashed"), DUPLICATE(4, "Already running"); final int code; final String msg; StartExitCode(int code, String msg) { this.code = code; this.msg = msg; } @Override public int getCode(){ return code; } @Override public String getMsg(){ return msg; } } /** * Exit code for checking agent status: * Success is agent successfully running * Failed is agent not running * Crashed is agent start but eventually stopped working */ public enum StatusExitCode implements ExitCode { SUCCESS(0, "Agent running") { @Override public boolean isSuccessful() { return true; } }, FAILED(1, "Agent not running"), CRASHED(2, "Agent crashed"); final int code; final String msg; StatusExitCode(int code, String msg) { this.code = code; this.msg = msg; } @Override public int getCode(){ return code; } @Override public String getMsg(){ return msg; } } /** * Exit code for stopping agent: * Stop: stopped successfully * Nonstop: not stopped successfully */ public enum StopExitCode implements ExitCode { STOP(0, "Stopped") { @Override public boolean isSuccessful() { return true; } }, NONSTOP(1, "Failed to stop"); 
final int code; final String msg; StopExitCode(int code, String msg) { this.code = code; this.msg = msg; } @Override public int getCode(){ return code; } @Override public String getMsg(){ return msg; } } private XRService(final String[] args) throws IOException { this.args = args; parseArgs(); try { conf = JsonConfig.readJsonFile(json, logger); statusFileName = getStatusFileName(conf); } catch (Exception exp) { final String err = "cannot parse the configuration file " + json + ", " + exp.getMessage(); throw new IllegalArgumentException(err, exp); } try { logger = getServiceLogger(conf); rlLogger = new RateLimitingLogger<>(RL_LOG_PERIOD_MS, 8, logger); } catch (IOException ioe) { final String err = "cannot create logger for region=" + conf.getRegion() + ", " + ioe.getMessage(); throw new IllegalStateException(err, ioe); } lockMan = new LockFileManager(conf); /* dump the json config with parameters in log file */ logger.info(lm("Run XRegion Service with command=" + command + ", configuration=" + json + ", status file=" + statusFileName + ", argument list=" + Arrays.toString(args))); } public static void main(String[] args) { try { final XRService xrs = new XRService(args); final ExitCode result = xrs.run(); if (result.isSuccessful()) { System.out.println(result.getMsg()); } else{ System.err.println(result.getMsg()); } System.exit(result.getCode()); } catch (Exception exp) { System.err.println("Error in executing command=" + args[args.length - 1] + " for cross-region service, " + exp.getMessage()); System.exit(StartExitCode.FAILED.getCode()); } } /** * Builds the agent id * * @param conf json config * @return agent id */ public static String buildAgentId(JsonConfig conf) { return conf.getRegion() + LOG_FILE_NAME_SPLITTER + conf.getAgentGroupSize() + LOG_FILE_NAME_SPLITTER + conf.getAgentId(); } @Override public String toString() { return "command=" + command + ", json=" + json + (command.equals(STOP_COMMAND_ARGS) ? 
", force=" + force : ""); } /*---------------------* * Private Functions * *---------------------*/ private static void usage(final String message) { if (message != null) { System.err.println("\n" + message + "\n"); } System.err.println("Usage: + XRService"); System.err.println("\t[ xrstart | xrstop | xrstatus ] " + "-config <JSON config file> [ -bg ] [ -force ]"); System.exit(1); } private static void usageStop() { System.err.println("Usage: + XRService"); System.err.println("\t[ xrstop ]" + "-config <JSON config file> [ -force ]"); System.exit(1); } private static void usageStatus() { System.err.println("Usage: + XRService"); System.err.println("\t[ xrstatus ]" + "-config <JSON config file>"); System.exit(1); } private static String[] heapSizeFinder(String jsonPath) { JsonConfig configure; try { configure = JsonConfig.readJsonFile(jsonPath, null); } catch (Exception exp) { final String err = "Cannot parse the configuration file at path=" + jsonPath + ", error=" + exp.getMessage(); throw new IllegalArgumentException(err, exp); } final String xms = ("-Xms" + configure.getBgInitHeapSizeMB() + "m"); final String xmx = ("-Xmx" + configure.getBgMaxHeapSizeMB() + "m"); return new String[] {xms, xmx}; } private static String getStatusFileName(JsonConfig config) { /* status file name: e.g., status.2.0.txt, status.2.1.txt, etc. */ return STATUS_FILE_NAME_BASE + "." + config.getAgentGroupSize() + "." + config.getAgentId() + "." 
+ STATUS_FILE_NAME_EXT; } /** * Method to check if status file is present with timeout * */ private boolean statusFilePresent(String fileDir) throws InterruptedException { final File file = new File(fileDir, statusFileName); final long stopTime = System.currentTimeMillis() + TIMEOUT_MS; String fileString = file.toString(); assert testDeleteStatus(fileString); if (!file.exists()) { while (true) { final long wait = stopTime - System.currentTimeMillis(); if (wait <= 0) { break; } /* for testing only */ assert testDeleteStatus(fileString); if (file.exists()) { return true; } final long interval = Math.min(wait, INTV_MS); Thread.sleep(interval); } return false; } return true; } /** * Method to check if pid is running * */ private static boolean isProcessRunning(long processID) throws IOException, InterruptedException { String[] cmd = {"ps", "-p", String.valueOf(processID)}; ProcessBuilder build = new ProcessBuilder().command(cmd); build.redirectErrorStream(true); build.redirectOutput(ProcessBuilder.Redirect.DISCARD); Process p = build.start(); return p.waitFor() == 0; } /** * Failure injection for testing * */ private static boolean testKillProcess(long pid) throws InterruptedException{ if (KILL_PROCESS_TEST) { final Optional<ProcessHandle> optHandle = ProcessHandle.of(pid); final ProcessHandle handle = optHandle.get(); handle.destroyForcibly(); try { handle.onExit().get(TEST_WAIT_KILL_SECS, SECONDS); } catch (ExecutionException | TimeoutException e) { throw new RuntimeException(e); } } return true; } /** * Failure injection for testing * */ private static boolean testDeleteStatus(String fileDir) { if (REMOVE_STATUS_TEST) { final File file = new File(fileDir); file.delete(); if (file.exists()) { System.err.println("Fail to delete file=" + fileDir); } } return true; } /** * Process builder to start a new process * */ public static ProcessBuilder buildProcess(String jsonPath){ final List<String> cmd = new ArrayList<>(); final String[] heapSize = heapSizeFinder(jsonPath); 
final String cp = System.getProperty("java.class.path"); final String className = KVStoreMain.class.getName(); Collections.addAll(cmd,"java", heapSize[0], heapSize[1], "-cp", cp, className, "xrstart", "-config", jsonPath); return new ProcessBuilder().command(cmd); } /** * Start agent in background * */ public Process startProcess() throws IOException{ final ProcessBuilder build = buildProcess(json); build.redirectError(ProcessBuilder.Redirect.INHERIT); build.redirectOutput(ProcessBuilder.Redirect.DISCARD); return build.start(); } /** * Parse status file with given process id * */ private StartParseStatus statusParseWithPid(String fileDir, long pid) { final File file = new File(fileDir, statusFileName); if (file.exists()) { try (BufferedReader br = new BufferedReader( new FileReader(fileDir + "/" + statusFileName))) { final String line = br.readLine(); final long pid1 = parseLong(line); if (pid != pid1) { return StartParseStatus.DUPLICATE; } assert testKillProcess(pid1); if (isProcessRunning(pid1)) { return StartParseStatus.SUCCESS; } return StartParseStatus.CRASHED; } catch (IOException| InterruptedException e) { return StartParseStatus.FAILURE; } } return StartParseStatus.FAILURE; } /** * Parse status file * */ private StartParseStatus statusParse(String fileDir) { try (BufferedReader br = new BufferedReader( new FileReader(fileDir + "/" + statusFileName))) { final String line = br.readLine(); final long pid = parseLong(line); assert testKillProcess(pid); if (isProcessRunning(pid)) { return StartParseStatus.SUCCESS; } return StartParseStatus.CRASHED; } catch (IOException|InterruptedException e) { return StartParseStatus.FAILURE; } } /** * Runs one of the commands */ private ExitCode run() throws IOException, InterruptedException { switch (command) { case START_COMMAND_NAME: if (background) { return runStartBg(json); } return runStart(); case STOP_COMMAND_NAME: if (background) { /* stop command cannot run in background */ usageStop(); break; } return runStop(); 
case STATUS_COMMAND_NAME: if (background) { /* status command cannot run in background */ usageStatus(); break; } return runStatus(json); default: throw new IllegalStateException("Unsupported command " + command); } throw new IllegalStateException("Cannot run command=" + command); } private StartExitCode runStart() { final String dir = new File(json).getAbsoluteFile().getParent(); final String tmpPath = dir + "/" + statusFileName + ".tmp"; final String resPath = dir + "/" + statusFileName; try (PrintWriter pw = new PrintWriter(tmpPath)) { if (!lockMan.lock()) { System.err.println( "Duplicate cross-region service is not allowed, id=" + buildAgentId(conf)); return StartExitCode.DUPLICATE; } /* get the lock */ XRegionService service; int attempts = 0; while (true) { try { service = new XRegionService(conf, logger); break; } catch (ServiceMDMan.UnreachableHostRegionException exp) { attempts++; final String msg = "Please check if the local region=" + conf.getRegion() + " is online, will retry after " + UNREACHABLE_HOST_REGION_RETRY_MS + " ms, # attempts=" + attempts + ", error " + exp.getCause().getMessage(); rlLogger.log(conf.getRegion(), Level.WARNING, lm(msg)); synchronized (this) { wait(UNREACHABLE_HOST_REGION_RETRY_MS); } } } /* add shutdown hook */ addShutdownHook(service); /* start service */ service.start(); final long pid = lockMan.readPid(); final String ts = FormatUtils.formatDateTime(System.currentTimeMillis()); String result = pid + "\n" + "Cross-region agent (region=" + conf.getRegion() + ", store=" + conf.getStore() + ", helpers=" + Arrays.toString(conf.getHelpers()) + ") starts up from config file=" + json + " at " + ts; pw.write(result); pw.close(); Files.move(Paths.get(tmpPath), Paths.get(resPath), StandardCopyOption.ATOMIC_MOVE); System.out.println(result); /* wait for service to exit */ service.join(); return StartExitCode.SUCCESS; }catch (Exception exp) { System.err.println("Cannot start cross-region service agent: " + exp); return 
StartExitCode.FAILED; } finally { lockMan.release(); } } /** * Run agent in bg and get return code * */ private StartExitCode runStartBg(String jsonPath) throws IOException, InterruptedException { final String fileDir = new File(jsonPath).getParent(); final Process p = startProcess(); /* * Wait to give time for process to generate exit code if any */ p.waitFor(INTV_MS, TimeUnit.MILLISECONDS); if (p.isAlive()) { final long pid = p.pid(); boolean statusFile = statusFilePresent(fileDir); if (statusFile) { final StartParseStatus statusRes = statusParseWithPid(fileDir, pid); switch (statusRes) { case SUCCESS: return StartExitCode.SUCCESS; case DUPLICATE: return StartExitCode.DUPLICATE; case CRASHED: return StartExitCode.CRASHED; default: return StartExitCode.FAILED; } } return StartExitCode.TIMEOUT; } /* return the status from the process exit code */ final int code = p.exitValue(); return StartExitCode.values()[code]; } /** * Run status command and get return value * */ private StatusExitCode runStatus(String fileDir){ final String dir = new File(fileDir).getParent(); final StartParseStatus status = statusParse(dir); switch (status){ case SUCCESS: return StatusExitCode.SUCCESS; case CRASHED: return StatusExitCode.CRASHED; default: return StatusExitCode.FAILED; } } private StopExitCode runStop() { String dir = new File(json).getParent(); String resPath = dir + "/" + statusFileName; File status = new File(resPath); try { final long pid = lockMan.readPid(); final String error = runKill(pid, force); final String ts = FormatUtils.formatDateTime(System.currentTimeMillis()); if (error == null) { String result = pid + "\n" + "Cross-region service (pid=" + pid + ", region=" + conf.getRegion() + ", store=" + conf.getStore() + ") shuts down (force=" + force + ")" + " at time=" + ts; status.delete(); lockMan.deletePid(); System.out.println(result); return StopExitCode.STOP; } System.err.println("Cannot shut down cross-region service " + "(pid=" + pid + ", region=" + 
conf.getRegion() + ", store=" + conf.getStore() + ", force=" + force + ")" + " at time=" + ts + ", error=" + error); return StopExitCode.NONSTOP; } catch (Exception exp) { System.err.println(exp.getMessage()); } return StopExitCode.NONSTOP; } /** * Parses the argument list */ private void parseArgs() { int nArgs = args.length; /* get the command */ command = args[nArgs - 1]; if (!command.equals(START_COMMAND_NAME) && !command.equals(STOP_COMMAND_NAME) && !command.equals(STATUS_COMMAND_NAME)) { usage("invalid command: " + command); } int argc = 0; while (argc < nArgs - 1) { final String thisArg = args[argc++]; if ("-config".equals(thisArg)) { if (argc < nArgs) { json = args[argc++]; } else if (args[0].equals("start")) { usage("-config requires an argument to start"); } } else if ("-bg".equals(thisArg)) { background = true; } else if ("-force".equals(thisArg)) { force = true; } else { usage("Unknown argument: " + thisArg); } } } /** * Gets client side logger for agent thread * * @return client side logger */ private static Logger getServiceLogger(JsonConfig conf) throws IOException { final Logger logger = ClientLoggerUtils.getLogger( XRService.class, XRService.class.getSimpleName()); final String dir = createLogDirIfNotExist(conf.getAgentRoot()); final String fileName = buildLogFileName(conf); final File lf = new File(dir, fileName); /* TODO: Provide a way for users to customize the logging config */ final FileHandler fh = new FileHandler(lf.getAbsolutePath(), LOG_FILE_LIMIT_BYTES, LOG_FILE_LIMIT_COUNTS, true); fh.setFormatter(new LogFormatter(null)); logger.addHandler(fh); return logger; } /* create log directory if not exist */ private static String createLogDirIfNotExist(String path) throws IOException { final Path dir = Paths.get(path, DEFAULT_LOG_DIR); if (!Files.exists(dir)) { try { Files.createDirectories(dir); } catch (IOException exp) { System.err.println("Cannot create log directory " + dir.getFileName() + ", error " + exp.getMessage()); throw exp; } } 
return dir.toString(); } /* build the log file name from agent id */ private static String buildLogFileName(JsonConfig conf) { return buildAgentId(conf) + LOG_FILE_SUFFIX; } private static String buildLockFileName(JsonConfig conf) { return buildAgentId(conf) + LOCK_FILE_SUFFIX; } public static String buildPidFileName(JsonConfig conf) { return buildAgentId(conf) + PID_FILE_SUFFIX; } /** * Kill the process with the specified ID, waiting for the process to exit. * * @param pid the process ID * @param force whether to force kill * @return null if command exits normally, or error output otherwise */ private static String runKill(long pid, boolean force) { final Optional<ProcessHandle> optHandle = ProcessHandle.of(pid); /* Process is already dead */ if (optHandle.isEmpty()) { return "Process with pid=" + pid + " is already dead"; } final ProcessHandle handle = optHandle.get(); if (force) { handle.destroyForcibly(); } else { handle.destroy(); } /* Wait for the process to exit */ try { handle.onExit().get(WAIT_TIME_MS, SECONDS); return null; } catch (Exception e) { return "Error in stop the service, error=" + e; } } private void addShutdownHook(XRegionService service) { logger.fine(() -> lm("Adding shutdown hook")); Runtime.getRuntime().addShutdownHook(new ShutdownThread(service)); } private class LockFileManager { private final String lockFileDir; private final String pidFileName; /* The channel and lock for the lock file. 
*/ private final RandomAccessFile lockFile; private FileLock exclLock = null; LockFileManager(JsonConfig conf) throws IOException { pidFileName = buildPidFileName(conf); lockFileDir = conf.getAgentRoot(); /* lock file name and dir */ final String lockFileName = buildLockFileName(conf); lockFile = new RandomAccessFile( new File(lockFileDir, lockFileName), FileManager.FileMode.READWRITE_MODE.getModeValue()); } public boolean lock() throws IOException { final FileChannel channel = lockFile.getChannel(); try { /* lock exclusive */ exclLock = channel.tryLock(0, 1, false); final boolean succ = (exclLock != null); if (succ) { /* delete previous pid file if existent */ deletePid(); /* persist process id for stop */ writePid(); } return succ; } catch (OverlappingFileLockException e) { return false; } } public void release() { try { if (exclLock != null) { exclLock.release(); } } catch (Exception e) { /* ignore? */ } } /* read pid from pid file */ long readPid() throws IOException { final File file = new File(lockFileDir, pidFileName); if (!file.exists()) { throw new IOException("Cannot find PID file=" + file.getAbsolutePath() + ", check the file and if the " + "service has already shut down."); } if (file.length() == 0) { throw new IOException("Empty PID file: " + file.getAbsolutePath()); } /* only read the first line */ final String line = Files.lines(file.toPath()).iterator().next(); return Long.parseLong(line); } /* delete pid file */ void deletePid() { final File file = new File(lockFileDir, pidFileName); if (!file.exists()) { return; } if (!file.delete()) { logger.info(lm("Fail to delete pid file=" + file.getAbsolutePath())); } } /* write pid to pid file */ private void writePid() throws IOException { final long pid = getPid(); if (pid == 0) { final String err = "Cannot determine process id"; throw new IOException(err); } final List<String> lines = Collections.singletonList(String.valueOf(pid)); final Path file = Paths.get(lockFileDir, pidFileName); try { 
Files.write(file, lines); } catch (IOException ioe) { logger.warning(lm("Cannot write process id=" + pid + " to pid file=" + file.toAbsolutePath())); throw ioe; } } /* get pid from OS */ private long getPid() { //TODO: simplify when upgrade to Java 9+ // return ProcessHandle.current().pid(); /* Java 8 */ final String processName = ManagementFactory.getRuntimeMXBean().getName(); if (processName != null && processName.length() > 0) { try { return Long.parseLong(processName.split("@")[0]); } catch (Exception e) { return 0; } } return 0; } } /* Provide a shutdown hook so that if the service is killed externally */ private class ShutdownThread extends Thread { private final XRegionService service; ShutdownThread(XRegionService service) { this.service = service; } @Override public void run() { logger.info(lm("Shutdown thread running, stopping services")); try { service.shutdown(); /* wait for shutdown complete */ service.join(); } catch (Exception exp) { /* ignored in shut down */ } finally { logger.info(lm("Shutdown complete")); } } } private String lm(String msg) { return "[XRegionService] " + msg; } }
google/guice
34,891
core/test/com/google/inject/spi/ModuleAnnotatedMethodScannerTest.java
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject.spi; import static com.google.common.truth.Truth.assertThat; import static com.google.inject.name.Names.named; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.inject.AbstractModule; import com.google.inject.Binder; import com.google.inject.Binding; import com.google.inject.ConfigurationException; import com.google.inject.CreationException; import com.google.inject.Exposed; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.PrivateModule; import com.google.inject.Provides; import com.google.inject.internal.Annotations; import com.google.inject.internal.ProviderMethodsModule; import com.google.inject.internal.util.StackTraceElements; import com.google.inject.name.Named; import com.google.inject.name.Names; import java.lang.annotation.Annotation; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; import java.lang.reflect.Method; import java.util.Set; import jakarta.inject.Qualifier; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link 
ModuleAnnotatedMethodScanner} usage. */ @RunWith(JUnit4.class) public class ModuleAnnotatedMethodScannerTest { @Test public void scanning() throws Exception { Module module = new AbstractModule() { @TestProvides @Named("foo") String foo() { return "foo"; } @TestProvides @Named("foo2") String foo2() { return "foo2"; } }; Injector injector = Guice.createInjector(module, scannerModule(new NamedMunger())); // assert no bindings named "foo" or "foo2" exist -- they were munged. assertMungedBinding(injector, String.class, "foo", "foo"); assertMungedBinding(injector, String.class, "foo2", "foo2"); Binding<String> fooBinding = injector.getBinding(Key.get(String.class, named("foo-munged"))); Binding<String> foo2Binding = injector.getBinding(Key.get(String.class, named("foo2-munged"))); // Validate the provider has a sane toString assertThat(methodName(TestProvides.class, "foo", module)) .isEqualTo(fooBinding.getProvider().toString()); assertThat(methodName(TestProvides.class, "foo2", module)) .isEqualTo(foo2Binding.getProvider().toString()); } @Test public void skipSources() throws Exception { Module module = new AbstractModule() { @Override protected void configure() { binder() .skipSources(getClass()) .install( new AbstractModule() { @TestProvides @Named("foo") String foo() { return "foo"; } }); } }; Injector injector = Guice.createInjector(module, scannerModule(new NamedMunger())); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void withSource() throws Exception { Module module = new AbstractModule() { @Override protected void configure() { binder() .withSource("source") .install( new AbstractModule() { @TestProvides @Named("foo") String foo() { return "foo"; } }); } }; Injector injector = Guice.createInjector(module, scannerModule(new NamedMunger())); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void moreThanOneClaimedAnnotationFails() throws Exception { Module module = new AbstractModule() { @TestProvides 
@TestProvides2 String foo() { return "foo"; } }; CreationException creationException = assertThatInjectorCreationFails(module, scannerModule(new NamedMunger())); assertThat(creationException.getErrorMessages()).hasSize(1); assertThat(creationException) .hasMessageThat() .contains( "More than one annotation claimed by NamedMunger on method" + " ModuleAnnotatedMethodScannerTest$4.foo(). Methods can only have one annotation" + " claimed per scanner."); } private String methodName(Class<? extends Annotation> annotation, String method, Object container) throws Exception { return Annotations.annotationInstanceClassString(annotation, /* includePackage= */ true) + " " + StackTraceElements.forMember(container.getClass().getDeclaredMethod(method)); } @Documented @Target(METHOD) @Retention(RUNTIME) private @interface TestProvides {} @Documented @Target(METHOD) @Retention(RUNTIME) private @interface TestProvides2 {} private static class NamedMunger extends ModuleAnnotatedMethodScanner { @Override public String toString() { return "NamedMunger"; } @Override public Set<? extends Class<? 
extends Annotation>> annotationClasses() { return ImmutableSet.of(TestProvides.class, TestProvides2.class); } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { return key.withAnnotation(Names.named(((Named) key.getAnnotation()).value() + "-munged")); } } private void assertMungedBinding( Injector injector, Class<?> clazz, String originalName, Object expectedValue) { assertThat(injector.getExistingBinding(Key.get(clazz, named(originalName)))).isNull(); Binding<?> fooBinding = injector.getBinding(Key.get(clazz, named(originalName + "-munged"))); assertThat(fooBinding.getProvider().get()).isEqualTo(expectedValue); } @Test public void failingScanner() { CreationException creationException = assertThatInjectorCreationFails(new SomeModule(), scannerModule(new FailingScanner())); Message m = Iterables.getOnlyElement(creationException.getErrorMessages()); assertThat(m.getMessage()) .isEqualTo("An exception was caught and reported. Message: Failing in the scanner."); assertThat(creationException).hasCauseThat().isInstanceOf(IllegalStateException.class); ElementSource source = (ElementSource) Iterables.getOnlyElement(m.getSources()); assertThat(SomeModule.class.getName()) .isEqualTo(Iterables.getOnlyElement(source.getModuleClassNames())); assertThat(String.class.getName() + " " + SomeModule.class.getName() + ".aString()") .isEqualTo(source.toString()); } @Test public void sannerFailureDoesNotPropagateDownstream() { Module module = new AbstractModule() { @TestProvides @Named("foo") String provideFoo() { return "FOO"; } @Provides @Named("bar") String providesBar(@Named("foo") String foo) { return "uses " + foo; } }; CreationException exception = assertThrows( CreationException.class, () -> Guice.createInjector(module, scannerModule(new FailingScanner()))); // Verify that failure in binding @Named("foo") doesn't effect bindings that depends on it. 
assertThat(exception.getErrorMessages()).hasSize(1); } public static class FailingScanner extends ModuleAnnotatedMethodScanner { @Override public Set<? extends Class<? extends Annotation>> annotationClasses() { return ImmutableSet.of(TestProvides.class); } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation rawAnnotation, Key<T> key, InjectionPoint injectionPoint) { throw new IllegalStateException("Failing in the scanner."); } } static class SomeModule extends AbstractModule { @TestProvides String aString() { return "Foo"; } } @Test public void childInjectorInheritsScanner() { Injector parent = Guice.createInjector(scannerModule(new NamedMunger())); Injector child = parent.createChildInjector( new AbstractModule() { @TestProvides @Named("foo") String foo() { return "foo"; } }); assertMungedBinding(child, String.class, "foo", "foo"); } @Test public void childInjectorScannersDontImpactSiblings() { Module module = new AbstractModule() { @TestProvides @Named("foo") String foo() { return "foo"; } }; Injector parent = Guice.createInjector(); Injector child = parent.createChildInjector(scannerModule(new NamedMunger()), module); assertMungedBinding(child, String.class, "foo", "foo"); // no foo nor foo-munged in sibling, since scanner never saw it. 
Injector sibling = parent.createChildInjector(module); assertThat(sibling.getExistingBinding(Key.get(String.class, named("foo")))).isNull(); assertThat(sibling.getExistingBinding(Key.get(String.class, named("foo-munged")))).isNull(); } @Test public void privateModuleInheritScanner_usingPrivateModule() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleInheritsScanner_scannerInstalledAfterPrivateModule() { Injector injector = Guice.createInjector( new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }, // Scanner installed after private module. scannerModule(new NamedMunger())); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModule_skipSourcesWithinPrivateModule() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new PrivateModule() { @Override protected void configure() { binder() .skipSources(getClass()) .install( new AbstractModule() { @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModule_skipSourcesForPrivateModule() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new AbstractModule() { @Override protected void configure() { binder() .skipSources(getClass()) .install( new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleInheritScanner_usingPrivateBinder() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new AbstractModule() 
{ @Override protected void configure() { binder() .newPrivateBinder() .install( new AbstractModule() { @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleInheritScanner_skipSourcesFromPrivateBinder() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new AbstractModule() { @Override protected void configure() { binder() .newPrivateBinder() .skipSources(getClass()) .install( new AbstractModule() { @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleInheritScanner_skipSourcesFromPrivateBinder2() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new AbstractModule() { @Override protected void configure() { binder() .skipSources(getClass()) .newPrivateBinder() .install( new AbstractModule() { @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleScannersDontImpactSiblings_usingPrivateModule() { Injector injector = Guice.createInjector( new PrivateModule() { @Override protected void configure() { install(scannerModule(new NamedMunger())); } @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }, new PrivateModule() { @Override protected void configure() {} // ignored! 
(because the scanner doesn't run over this module) @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleScannersDontImpactSiblings_usingPrivateBinder() { Injector injector = Guice.createInjector( new AbstractModule() { @Override protected void configure() { binder() .newPrivateBinder() .install( new AbstractModule() { @Override protected void configure() { install(scannerModule(new NamedMunger())); } @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }, new AbstractModule() { @Override protected void configure() { binder() .newPrivateBinder() .install( new AbstractModule() { // ignored! (because the scanner doesn't run over this module) @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleWithinPrivateModule() { Injector injector = Guice.createInjector( scannerModule(new NamedMunger()), new PrivateModule() { @Override protected void configure() { expose(Key.get(String.class, named("foo-munged"))); install( new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void privateModuleWithinPrivateModule_parentScannerInheritedIfInstalledAfter() { Injector injector = Guice.createInjector( new PrivateModule() { @Override protected void configure() { expose(Key.get(String.class, named("foo-munged"))); install( new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Named("foo") String foo() { return "foo"; } }); } }, scannerModule(new NamedMunger())); assertMungedBinding(injector, String.class, "foo", "foo"); } @Test public void abstractMethodsAreScannedForOverrides() { abstract class Superclass { @TestProvides abstract boolean abstractTest(); 
} abstract class Subclass extends Superclass { @TestProvides @Override abstract boolean abstractTest(); } ModuleAnnotatedMethodScanner testScanner = new ModuleAnnotatedMethodScanner() { @Override public Set<? extends Class<? extends Annotation>> annotationClasses() { return ImmutableSet.of(TestProvides.class); } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { return null; } }; CreationException creationException = assertThatInjectorCreationFails( ProviderMethodsModule.forModule(Subclass.class, testScanner)); assertThat(creationException) .hasMessageThat() .contains( "Overriding @ModuleAnnotatedMethodScannerTest.TestProvides methods is not allowed."); } static class Superclass { @TestProvides boolean booleanTest() { return true; } } static class Subclass extends Superclass { @TestProvides @Override boolean booleanTest() { return true; } } static class IgnoringScanner extends ModuleAnnotatedMethodScanner { private final Class<?> classToIgnore; private int ignoredCounter = 0; IgnoringScanner(Class<?> classToIgnore) { this.classToIgnore = classToIgnore; } @Override public Set<? extends Class<? 
extends Annotation>> annotationClasses() { return ImmutableSet.of(TestProvides.class); } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { Method method = (Method) injectionPoint.getMember(); if (method.getDeclaringClass().equals(classToIgnore)) { ignoredCounter++; return null; } return key; } int ignoredCounter() { return ignoredCounter; } } @Test public void ignoreMethodsScannedForOverridesSubclass() { IgnoringScanner scanner = new IgnoringScanner(Subclass.class); CreationException creationException = assertThatInjectorCreationFails(ProviderMethodsModule.forModule(new Subclass(), scanner)); assertThat(creationException) .hasMessageThat() .contains( "Overriding @ModuleAnnotatedMethodScannerTest.TestProvides methods is not allowed."); assertThat(scanner.ignoredCounter()).isEqualTo(1); // checking that there was a method ignored. } @Test public void ignoreMethodsScannedForOverridesSuperclass() { IgnoringScanner scanner = new IgnoringScanner(Superclass.class); CreationException creationException = assertThatInjectorCreationFails(ProviderMethodsModule.forModule(new Subclass(), scanner)); assertThat(creationException) .hasMessageThat() .contains( "Overriding @ModuleAnnotatedMethodScannerTest.TestProvides methods is not allowed."); assertThat(scanner.ignoredCounter()).isEqualTo(1); // checking that there was a method ignored. } static class TestScanner extends ModuleAnnotatedMethodScanner { ImmutableSet<Class<? extends Annotation>> annotations; TestScanner(Class<? extends Annotation>... annotations) { this.annotations = ImmutableSet.copyOf(annotations); } @Override public Set<? extends Class<? 
extends Annotation>> annotationClasses() { return annotations; } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { return key; } } @Test public void ignoreMethods() { class ModuleWithMethodsToIgnore { @TestProvides boolean booleanTest() { return true; } @TestProvides int ignore() { return 0; } } ModuleAnnotatedMethodScanner filteringScanner = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { Method method = (Method) injectionPoint.getMember(); if (method.getName().equals("ignore")) { return null; } return key; } }; Injector filteredInjector = Guice.createInjector( ProviderMethodsModule.forModule(new ModuleWithMethodsToIgnore(), filteringScanner)); assertThat(filteredInjector.getInstance(Key.get(Boolean.class))).isTrue(); assertThrows(ConfigurationException.class, () -> filteredInjector.getInstance(Integer.class)); Injector unfilteredInjector = Guice.createInjector( ProviderMethodsModule.forModule( new ModuleWithMethodsToIgnore(), new TestScanner(TestProvides.class))); assertThat(unfilteredInjector.getInstance(Key.get(Boolean.class))).isTrue(); assertThat(unfilteredInjector.getInstance(Integer.class)).isEqualTo(0); } @Test public void scannerCantRegisterScanner() { ModuleAnnotatedMethodScanner scannerRegisteringScanner = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.scanModulesForAnnotatedMethods(new TestScanner(TestProvides2.class)); return key; } }; CreationException creationException = assertThatInjectorCreationFails( scannerModule(scannerRegisteringScanner), new AbstractModule() { @TestProvides boolean bogus() { return true; } }); assertThat(creationException) .hasMessageThat() .contains("Scanners are not allowed to register other scanners"); } @Test 
public void scannerCantInstallModuleWithCustomProvidesMethods() { ModuleAnnotatedMethodScanner scannerInstallingScannableModule = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.install( new AbstractModule() { @TestProvides2 int bogus() { return 0; } }); return key; } }; CreationException creationException = assertThatInjectorCreationFails( scannerModule(scannerInstallingScannableModule), scannerModule(new TestScanner(TestProvides2.class)), new AbstractModule() { @TestProvides boolean bogus() { return true; } }); assertThat(creationException) .hasMessageThat() .contains( "Installing modules with custom provides methods from a ModuleAnnotatedMethodScanner" + " is not supported"); } @Test public void scannerCantInstallPrivateModuleWithCustomProvidesMethods() { ModuleAnnotatedMethodScanner scannerInstallingScannablePrivateModule = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.install( new PrivateModule() { @Override protected void configure() {} @TestProvides2 int bogus() { return 0; } }); return key; } }; CreationException creationException = assertThatInjectorCreationFails( scannerModule(scannerInstallingScannablePrivateModule), scannerModule(new TestScanner(TestProvides2.class)), new AbstractModule() { @TestProvides boolean bogus() { return true; } }); assertThat(creationException) .hasMessageThat() .contains( "Installing modules with custom provides methods from a ModuleAnnotatedMethodScanner" + " is not supported"); } @Test public void scannerCanInstallModuleWithRegularProvidesMethods() { ModuleAnnotatedMethodScanner scanner = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.install( new 
AbstractModule() { @Provides int provideAnswer() { return 42; } }); return key; } }; Injector injector = Guice.createInjector( scannerModule(scanner), new AbstractModule() { @TestProvides boolean bogus() { return true; } }); assertThat(injector.getInstance(Integer.class)).isEqualTo(42); } CreationException assertThatInjectorCreationFails(Module... modules) { return assertThrows(CreationException.class, () -> Guice.createInjector(modules)); } @Test public void scannerSourceCorrectForNonGuiceModule() { class NonGuiceModule { @TestProvides boolean booleanTest() { return true; } } TestScanner testScanner = new TestScanner(TestProvides.class); Injector injector = Guice.createInjector(ProviderMethodsModule.forModule(new NonGuiceModule(), testScanner)); assertThat(getSourceScanner(injector.getBinding(Boolean.class))).isEqualTo(testScanner); } @Qualifier @Retention(RUNTIME) @interface Foo {} @Test public void scannerSourceCorrectForGuiceModule() { Module module = new AbstractModule() { @TestProvides @Foo boolean booleanTest() { return true; } @Provides String stringTest() { return ""; } @Override protected void configure() { bind(Long.class).toInstance(1L); } }; TestScanner testScanner = new TestScanner(TestProvides.class); Injector injector = Guice.createInjector(module, scannerModule(testScanner)); assertThat(getSourceScanner(injector.getBinding(Key.get(Boolean.class, Foo.class)))) .isEqualTo(testScanner); assertThat(getSourceScanner(injector.getBinding(String.class))).isNotEqualTo(testScanner); assertThat(getSourceScanner(injector.getBinding(Long.class))).isNull(); } @Test public void scannerSourceCorrectForBindingsCreatedByTheScannerDirectly() { ModuleAnnotatedMethodScanner scanner = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.bind(key.ofType(String.class)).toInstance("bla"); return null; } }; Injector injector = Guice.createInjector( new 
AbstractModule() { @TestProvides @Foo Long discardedLong() { return 1L; } }, scannerModule(scanner)); assertThat(getSourceScanner(injector.getBinding(Key.get(String.class, Foo.class)))) .isEqualTo(scanner); } @Test public void scannerSourceOfProvidesMethodBindingInsideCustomScannerIsCustomScanner() { ModuleAnnotatedMethodScanner scanner = new TestScanner(TestProvides.class) { @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint) { binder.install( new AbstractModule() { // All bindings inside custom scanner should have it as their source scanner - // including those created by a nested built-in @Provides* scanner. @Provides String provideString() { return "bla"; } }); return null; } }; Injector injector = Guice.createInjector( new AbstractModule() { @TestProvides @Foo Long discardedLong() { return 1L; } }, scannerModule(scanner)); assertThat(getSourceScanner(injector.getBinding(String.class))).isEqualTo(scanner); } @Test public void scannerSourceForPrivateModule() { Module module = new AbstractModule() { @Override protected void configure() { install( new PrivateModule() { @Override protected void configure() {} @Exposed @TestProvides @Foo String privateString() { return "bar"; } }); } }; TestScanner scanner = new TestScanner(TestProvides.class); Injector injector = Guice.createInjector(module, scannerModule(scanner)); assertThat(getSourceScanner(injector.getBinding(Key.get(String.class, Foo.class)))) .isEqualTo(scanner); } static class DelegatingScanner extends ModuleAnnotatedMethodScanner { Object delegate = null; @Override public Set<? extends Class<? 
extends Annotation>> annotationClasses() { return ImmutableSet.of(TestProvides.class); } @Override public <T> Key<T> prepareMethod( Binder binder, Annotation annotation, Key<T> key, InjectionPoint injectionPoint, Object delegate) { this.delegate = delegate; return key; } } @Test public void scannerPropagatesTheDelegateObject() { DelegatingScanner scanner = new DelegatingScanner(); AbstractModule module = new AbstractModule() { @TestProvides String provideString() { return "foo"; } }; Guice.createInjector(scannerModule(scanner), module); assertThat(scanner.delegate).isSameInstanceAs(module); } static class StaticProvider extends AbstractModule { @TestProvides static String provideString() { return "foo"; } } @Test public void staticScannerPropagatesNullDelegateObject() { DelegatingScanner scanner = new DelegatingScanner(); Guice.createInjector(ProviderMethodsModule.forModule(StaticProvider.class, scanner)); assertThat(scanner.delegate).isNull(); } ModuleAnnotatedMethodScanner getSourceScanner(Binding<?> binding) { return ((ElementSource) binding.getSource()).scanner; } private static Module scannerModule(ModuleAnnotatedMethodScanner scanner) { return new AbstractModule() { @Override protected void configure() { binder().scanModulesForAnnotatedMethods(scanner); } }; } }
googleapis/google-cloud-java
34,967
java-life-sciences/proto-google-cloud-life-sciences-v2beta/src/main/java/com/google/cloud/lifesciences/v2beta/Disk.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/lifesciences/v2beta/workflows.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.lifesciences.v2beta; /** * * * <pre> * Carries information about a disk that can be attached to a VM. * * See https://cloud.google.com/compute/docs/disks/performance for more * information about disk type, size, and performance considerations. * * Specify either [`Volume`][google.cloud.lifesciences.v2beta.Volume] or * [`Disk`][google.cloud.lifesciences.v2beta.Disk], but not both. * </pre> * * Protobuf type {@code google.cloud.lifesciences.v2beta.Disk} */ public final class Disk extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.lifesciences.v2beta.Disk) DiskOrBuilder { private static final long serialVersionUID = 0L; // Use Disk.newBuilder() to construct. 
private Disk(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Disk() { name_ = ""; type_ = ""; sourceImage_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Disk(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_Disk_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_Disk_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.lifesciences.v2beta.Disk.class, com.google.cloud.lifesciences.v2beta.Disk.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SIZE_GB_FIELD_NUMBER = 2; private int sizeGb_ = 0; /** * * * <pre> * The size, in GB, of the disk to attach. If the size is not * specified, a default is chosen to ensure reasonable I/O performance. * * If the disk type is specified as `local-ssd`, multiple local drives are * automatically combined to provide the requested size. Note, however, that * each physical SSD is 375GB in size, and no more than 8 drives can be * attached to a single instance. * </pre> * * <code>int32 size_gb = 2;</code> * * @return The sizeGb. */ @java.lang.Override public int getSizeGb() { return sizeGb_; } public static final int TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object type_ = ""; /** * * * <pre> * The Compute Engine disk type. If unspecified, `pd-standard` is used. * </pre> * * <code>string type = 3;</code> * * @return The type. */ @java.lang.Override public java.lang.String getType() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); type_ = s; return s; } } /** * * * <pre> * The Compute Engine disk type. If unspecified, `pd-standard` is used. * </pre> * * <code>string type = 3;</code> * * @return The bytes for type. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTypeBytes() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); type_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SOURCE_IMAGE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object sourceImage_ = ""; /** * * * <pre> * An optional image to put on the disk before attaching it to the VM. * </pre> * * <code>string source_image = 4;</code> * * @return The sourceImage. */ @java.lang.Override public java.lang.String getSourceImage() { java.lang.Object ref = sourceImage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceImage_ = s; return s; } } /** * * * <pre> * An optional image to put on the disk before attaching it to the VM. * </pre> * * <code>string source_image = 4;</code> * * @return The bytes for sourceImage. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSourceImageBytes() { java.lang.Object ref = sourceImage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceImage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (sizeGb_ != 0) { output.writeInt32(2, sizeGb_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceImage_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, sourceImage_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (sizeGb_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, sizeGb_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceImage_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, sourceImage_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.lifesciences.v2beta.Disk)) { return super.equals(obj); } com.google.cloud.lifesciences.v2beta.Disk other = (com.google.cloud.lifesciences.v2beta.Disk) obj; if (!getName().equals(other.getName())) return false; if (getSizeGb() != other.getSizeGb()) return false; if (!getType().equals(other.getType())) return false; if (!getSourceImage().equals(other.getSourceImage())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + SIZE_GB_FIELD_NUMBER; hash = (53 * hash) + getSizeGb(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + getType().hashCode(); hash = (37 * hash) + SOURCE_IMAGE_FIELD_NUMBER; hash = (53 * hash) + getSourceImage().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.Disk parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.Disk parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.lifesciences.v2beta.Disk parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.lifesciences.v2beta.Disk prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Carries information about a disk that can be attached to a VM. * * See https://cloud.google.com/compute/docs/disks/performance for more * information about disk type, size, and performance considerations. * * Specify either [`Volume`][google.cloud.lifesciences.v2beta.Volume] or * [`Disk`][google.cloud.lifesciences.v2beta.Disk], but not both. 
* </pre> * * Protobuf type {@code google.cloud.lifesciences.v2beta.Disk} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.lifesciences.v2beta.Disk) com.google.cloud.lifesciences.v2beta.DiskOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_Disk_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_Disk_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.lifesciences.v2beta.Disk.class, com.google.cloud.lifesciences.v2beta.Disk.Builder.class); } // Construct using com.google.cloud.lifesciences.v2beta.Disk.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; sizeGb_ = 0; type_ = ""; sourceImage_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.lifesciences.v2beta.WorkflowsProto .internal_static_google_cloud_lifesciences_v2beta_Disk_descriptor; } @java.lang.Override public com.google.cloud.lifesciences.v2beta.Disk getDefaultInstanceForType() { return com.google.cloud.lifesciences.v2beta.Disk.getDefaultInstance(); } @java.lang.Override public com.google.cloud.lifesciences.v2beta.Disk build() { com.google.cloud.lifesciences.v2beta.Disk result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.lifesciences.v2beta.Disk buildPartial() { 
com.google.cloud.lifesciences.v2beta.Disk result = new com.google.cloud.lifesciences.v2beta.Disk(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.lifesciences.v2beta.Disk result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.sizeGb_ = sizeGb_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.type_ = type_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.sourceImage_ = sourceImage_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.lifesciences.v2beta.Disk) { return mergeFrom((com.google.cloud.lifesciences.v2beta.Disk) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.lifesciences.v2beta.Disk other) { if (other == com.google.cloud.lifesciences.v2beta.Disk.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; 
onChanged(); } if (other.getSizeGb() != 0) { setSizeGb(other.getSizeGb()); } if (!other.getType().isEmpty()) { type_ = other.type_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getSourceImage().isEmpty()) { sourceImage_ = other.sourceImage_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { sizeGb_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { type_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { sourceImage_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @return The name. 
*/ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * A user-supplied name for the disk. Used when mounting the disk into * actions. 
The name must contain only upper and lowercase alphanumeric * characters and hyphens and cannot start with a hyphen. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int sizeGb_; /** * * * <pre> * The size, in GB, of the disk to attach. If the size is not * specified, a default is chosen to ensure reasonable I/O performance. * * If the disk type is specified as `local-ssd`, multiple local drives are * automatically combined to provide the requested size. Note, however, that * each physical SSD is 375GB in size, and no more than 8 drives can be * attached to a single instance. * </pre> * * <code>int32 size_gb = 2;</code> * * @return The sizeGb. */ @java.lang.Override public int getSizeGb() { return sizeGb_; } /** * * * <pre> * The size, in GB, of the disk to attach. If the size is not * specified, a default is chosen to ensure reasonable I/O performance. * * If the disk type is specified as `local-ssd`, multiple local drives are * automatically combined to provide the requested size. Note, however, that * each physical SSD is 375GB in size, and no more than 8 drives can be * attached to a single instance. * </pre> * * <code>int32 size_gb = 2;</code> * * @param value The sizeGb to set. * @return This builder for chaining. */ public Builder setSizeGb(int value) { sizeGb_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The size, in GB, of the disk to attach. If the size is not * specified, a default is chosen to ensure reasonable I/O performance. * * If the disk type is specified as `local-ssd`, multiple local drives are * automatically combined to provide the requested size. 
Note, however, that
 * each physical SSD is 375GB in size, and no more than 8 drives can be
 * attached to a single instance.
 * </pre>
 *
 * <code>int32 size_gb = 2;</code>
 *
 * @return This builder for chaining.
 */
// NOTE(review): generated protobuf builder code (protoc output, marked
// DO NOT EDIT at the file head). Behavioral changes belong in the .proto
// definition, not here; this pass only restores readable formatting and
// adds commentary.
public Builder clearSizeGb() {
  // Drop the presence bit for size_gb (0x2 in bitField0_) and restore the
  // proto3 default of 0.
  bitField0_ = (bitField0_ & ~0x00000002);
  sizeGb_ = 0;
  onChanged();
  return this;
}

// Holds either a decoded String or the raw ByteString from the wire;
// decoded lazily on first String access (see getType()).
private java.lang.Object type_ = "";

/**
 * <pre>
 * The Compute Engine disk type. If unspecified, `pd-standard` is used.
 * </pre>
 *
 * <code>string type = 3;</code>
 *
 * @return The type.
 */
public java.lang.String getType() {
  java.lang.Object ref = type_;
  if (!(ref instanceof java.lang.String)) {
    // First String access: decode the ByteString and cache the result.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    type_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * <pre>
 * The Compute Engine disk type. If unspecified, `pd-standard` is used.
 * </pre>
 *
 * <code>string type = 3;</code>
 *
 * @return The bytes for type.
 */
public com.google.protobuf.ByteString getTypeBytes() {
  java.lang.Object ref = type_;
  if (ref instanceof String) {
    // First bytes access: encode the String and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    type_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * <pre>
 * The Compute Engine disk type. If unspecified, `pd-standard` is used.
 * </pre>
 *
 * <code>string type = 3;</code>
 *
 * @param value The type to set.
 * @return This builder for chaining.
 */
public Builder setType(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  type_ = value;
  bitField0_ |= 0x00000004; // mark type as explicitly set
  onChanged();
  return this;
}

/**
 * <pre>
 * The Compute Engine disk type. If unspecified, `pd-standard` is used.
 * </pre>
 *
 * <code>string type = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearType() {
  type_ = getDefaultInstance().getType();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}

/**
 * <pre>
 * The Compute Engine disk type. If unspecified, `pd-standard` is used.
 * </pre>
 *
 * <code>string type = 3;</code>
 *
 * @param value The bytes for type to set.
 * @return This builder for chaining.
 */
public Builder setTypeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 before accepting them.
  checkByteStringIsUtf8(value);
  type_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

// Same lazy String/ByteString dual representation as type_ above.
private java.lang.Object sourceImage_ = "";

/**
 * <pre>
 * An optional image to put on the disk before attaching it to the VM.
 * </pre>
 *
 * <code>string source_image = 4;</code>
 *
 * @return The sourceImage.
 */
public java.lang.String getSourceImage() {
  java.lang.Object ref = sourceImage_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    sourceImage_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * <pre>
 * An optional image to put on the disk before attaching it to the VM.
 * </pre>
 *
 * <code>string source_image = 4;</code>
 *
 * @return The bytes for sourceImage.
 */
public com.google.protobuf.ByteString getSourceImageBytes() {
  java.lang.Object ref = sourceImage_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    sourceImage_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * <pre>
 * An optional image to put on the disk before attaching it to the VM.
 * </pre>
 *
 * <code>string source_image = 4;</code>
 *
 * @param value The sourceImage to set.
 * @return This builder for chaining.
 */
public Builder setSourceImage(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  sourceImage_ = value;
  bitField0_ |= 0x00000008; // mark source_image as explicitly set
  onChanged();
  return this;
}

/**
 * <pre>
 * An optional image to put on the disk before attaching it to the VM.
 * </pre>
 *
 * <code>string source_image = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearSourceImage() {
  sourceImage_ = getDefaultInstance().getSourceImage();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}

/**
 * <pre>
 * An optional image to put on the disk before attaching it to the VM.
 * </pre>
 *
 * <code>string source_image = 4;</code>
 *
 * @param value The bytes for sourceImage to set.
 * @return This builder for chaining.
 */
public Builder setSourceImageBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  sourceImage_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.lifesciences.v2beta.Disk)
}

// @@protoc_insertion_point(class_scope:google.cloud.lifesciences.v2beta.Disk)
private static final com.google.cloud.lifesciences.v2beta.Disk DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.lifesciences.v2beta.Disk();
}

public static com.google.cloud.lifesciences.v2beta.Disk getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to the Builder and, on failure, attaches the partially
// built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<Disk> PARSER =
    new com.google.protobuf.AbstractParser<Disk>() {
      @java.lang.Override
      public Disk parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<Disk> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<Disk> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.lifesciences.v2beta.Disk getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
35,084
java-migrationcenter/proto-google-cloud-migrationcenter-v1/src/main/java/com/google/cloud/migrationcenter/v1/DeleteReportConfigRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/migrationcenter/v1/migrationcenter.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.migrationcenter.v1;

/**
 * A request to delete a ReportConfig.
 *
 * <p>Fields: {@code name} (required resource name), {@code request_id}
 * (optional idempotency token), {@code force} (optional cascade delete).
 *
 * <p>Protobuf type {@code google.cloud.migrationcenter.v1.DeleteReportConfigRequest}
 */
// NOTE(review): generated protobuf message class (protoc output). Any
// behavioral change must be made in migrationcenter.proto and regenerated;
// this pass only restores readable formatting and adds commentary.
public final class DeleteReportConfigRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.migrationcenter.v1.DeleteReportConfigRequest)
    DeleteReportConfigRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use DeleteReportConfigRequest.newBuilder() to construct.
  private DeleteReportConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DeleteReportConfigRequest() {
    name_ = "";
    requestId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteReportConfigRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_DeleteReportConfigRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.migrationcenter.v1.MigrationCenterProto
        .internal_static_google_cloud_migrationcenter_v1_DeleteReportConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.class,
            com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;

  // Holds either a decoded String or the raw ByteString from the wire;
  // decoded lazily on first String access (see getName()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   * <pre>
   * Required. Name of the resource.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   * <pre>
   * Required. Name of the resource.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int REQUEST_ID_FIELD_NUMBER = 2;

  // Same lazy String/ByteString dual representation as name_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   * <pre>
   * Optional. An optional request ID to identify requests. Specify a unique
   * request ID so that if you must retry your request, the server will know to
   * ignore the request if it has already been completed. The server will
   * guarantee that for at least 60 minutes after the first request.
   *
   * For example, consider a situation where you make an initial request and
   * the request times out. If you make the request again with the same request
   * ID, the server can check if original operation with the same request ID
   * was received, and if so, will ignore the second request. This prevents
   * clients from accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   * Optional idempotency token; see {@link #getRequestId()} for full semantics.
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FORCE_FIELD_NUMBER = 3;

  private boolean force_ = false;

  /**
   * <pre>
   * Optional. If set to `true`, any child `Reports` of this entity will also be
   * deleted. If set to `false`, the request only works if the resource has no
   * children.
   * </pre>
   *
   * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }

  // Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3 semantics: fields at their default value are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    if (force_ != false) {
      output.writeBool(3, force_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    if (force_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest other =
        (com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (getForce() != other.getForce()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard generated parse entry points (delegating to PARSER) ----

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * A request to delete a ReportConfig.
   *
   * <p>Protobuf type {@code google.cloud.migrationcenter.v1.DeleteReportConfigRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.migrationcenter.v1.DeleteReportConfigRequest)
      com.google.cloud.migrationcenter.v1.DeleteReportConfigRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_DeleteReportConfigRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_DeleteReportConfigRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.class,
              com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.Builder.class);
    }

    // Construct using com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      requestId_ = "";
      force_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.migrationcenter.v1.MigrationCenterProto
          .internal_static_google_cloud_migrationcenter_v1_DeleteReportConfigRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest
        getDefaultInstanceForType() {
      return com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest build() {
      com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest buildPartial() {
      com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest result =
          new com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose builder has-bit is set.
    private void buildPartial0(
        com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.force_ = force_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest) {
        return mergeFrom((com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest other) {
      if (other
          == com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest.getDefaultInstance())
        return this;
      // Only non-default values from `other` overwrite this builder.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (name), wire type 2 (length-delimited)
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18: // field 2 (request_id), wire type 2
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24: // field 3 (force), wire type 0 (varint)
              {
                force_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Holds either a String or a ByteString; see the message-level name_ field.
    private java.lang.Object name_ = "";

    /**
     * <pre>
     * Required. Name of the resource.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Required resource name; see {@link #getName()}.
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Required resource name; see {@link #getName()}.
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Resets name to its default; see {@link #getName()}.
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Required resource name; see {@link #getName()}.
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object requestId_ = "";

    /**
     * <pre>
     * Optional. An optional request ID to identify requests. Specify a unique
     * request ID so that if you must retry your request, the server will know to
     * ignore the request if it has already been completed. The server will
     * guarantee that for at least 60 minutes after the first request.
     *
     * For example, consider a situation where you make an initial request and
     * the request times out. If you make the request again with the same request
     * ID, the server can check if original operation with the same request ID
     * was received, and if so, will ignore the second request. This prevents
     * clients from accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Optional idempotency token; see {@link #getRequestId()} for full semantics.
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Optional idempotency token; see {@link #getRequestId()} for full semantics.
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets request_id to its default; see {@link #getRequestId()}.
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Optional idempotency token; see {@link #getRequestId()} for full semantics.
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private boolean force_;

    /**
     * <pre>
     * Optional. If set to `true`, any child `Reports` of this entity will also be
     * deleted. If set to `false`, the request only works if the resource has no
     * children.
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }

    /**
     * Cascade-delete flag; see {@link #getForce()}.
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {
      force_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Resets force to its default (false); see {@link #getForce()}.
     *
     * @return This builder for chaining.
     */
    public Builder clearForce() {
      bitField0_ = (bitField0_ & ~0x00000004);
      force_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.migrationcenter.v1.DeleteReportConfigRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.migrationcenter.v1.DeleteReportConfigRequest)
  private static final com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest();
  }

  public static com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to the Builder and, on failure, attaches the partially
  // built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<DeleteReportConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteReportConfigRequest>() {
        @java.lang.Override
        public DeleteReportConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<DeleteReportConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteReportConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.migrationcenter.v1.DeleteReportConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
oracle/graal
34,960
substratevm/src/com.oracle.svm.test/src/com/oracle/svm/test/clinit/TestClassInitialization.java
/* * Copyright (c) 2019, 2019, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.oracle.svm.test.clinit; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Vector; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import org.graalvm.nativeimage.hosted.Feature; import org.graalvm.nativeimage.hosted.RuntimeClassInitialization; import com.oracle.svm.core.NeverInline; import com.oracle.svm.hosted.fieldfolding.IsStaticFinalFieldInitializedNode; import jdk.internal.misc.Unsafe; /* * The suffix of class names indicates the time when the class initializer is determined to be safe * for simulation at image build time. 
* * - MustBeSimulated: The simulation of class initializer succeeded, i.e., the class starts out as initialized at run time. * * - MustBeDelayed: The class initializer has side effects, it must be executed at run time. * * The suffixes are checked in the feature at build time (code in this file), at run time in the * main method (code in this file), and by an external script that parses the class initialization * log output (code in mx_substratevm.py). */ class PureMustBeSimulated { static int v; static { v = 1; v = 42; } } class InitializesPureMustBeDelayed { static int v; static { v = PureMustBeSimulated.v; } } /** * The class initializer of this type is actually never executed because static final int fields * assigned directly at the field definition site are already constant folded by javac at all usage * sites. */ class NonPureAccessedFinal { static final int v = 1; static { if (alwaysTrue()) { throw new RuntimeException("Must not be called at runtime or compile time."); } } static boolean alwaysTrue() { return true; } } class PureCallMustBeSimulated { static int v; static { v = TestClassInitialization.pure(); } } class NonPureMustBeDelayed { static int v = 1; static { System.out.println("Delaying NonPureMustBeDelayed"); } } class InitializesNonPureMustBeDelayed { static int v = NonPureMustBeDelayed.v; } class SystemPropReadMustBeDelayed { static int v = 1; static { System.getProperty("test"); } } class SystemPropWriteMustBeDelayed { static int v = 1; static { System.setProperty("test", ""); } } class StartsAThreadMustBeDelayed { static int v = 1; static { new Thread().start(); } } class CreatesAnExceptionMustBeDelayed { static Exception e; static { e = new Exception("should fire at runtime"); } } class ThrowsAnExceptionUninitializedMustBeDelayed { static int v = 1; static { if (PureMustBeSimulated.v == 42) { throw new RuntimeException("should fire at runtime"); } } } interface PureInterfaceMustBeSimulated { } class PureSubclassMustBeDelayed extends 
SuperClassMustBeDelayed { static int v = 1; } class SuperClassMustBeDelayed implements PureInterfaceMustBeSimulated { static { System.out.println("Delaying SuperClassMustBeDelayed"); } } interface InterfaceNonPureMustBeDelayed { int v = B.v; class B { static int v = 1; static { System.out.println("Delaying InterfaceNonPureMustBeDelayed"); } } } interface InterfaceNonPureDefaultMustBeDelayed { int v = B.v; class B { static int v = 1; static { System.out.println("Delaying InterfaceNonPureDefaultMustBeDelayed"); } } default int m() { return v; } } class PureSubclassInheritsDelayedInterfaceMustBeSimulated implements InterfaceNonPureMustBeDelayed { static int v = 1; } class PureSubclassInheritsDelayedDefaultInterfaceMustBeDelayed implements InterfaceNonPureDefaultMustBeDelayed { static int v = 1; } class ImplicitExceptionInInitializerUninitializedMustBeDelayed { static int a = 10; static int b = 0; static int res; static { res = a / b; } } class PureDependsOnImplicitExceptionUninitializedMustBeDelayed { static int a; static { a = ImplicitExceptionInInitializerUninitializedMustBeDelayed.res; } } class StaticFieldHolderMustBeSimulated { /** * Other class initializers that modify {@link #a} must not run at image build time so that the * initial value 111 assigned here can be read at run time. 
*/ static int a = 111; static void setA(int value) { a = value; } } class StaticFieldModifer1MustBeDelayed { static { StaticFieldHolderMustBeSimulated.a = 222; } static void triggerInitialization() { } } class StaticFieldModifer2MustBeDelayed { static { StaticFieldHolderMustBeSimulated.setA(333); } static void triggerInitialization() { } } class RecursionInInitializerMustBeSimulated { static int i = compute(200); static int compute(int n) { if (n <= 1) { return 1; } else { return n + compute(n - 1); } } } class UnsafeAccessMustBeSimulated { static UnsafeAccessMustBeSimulated value = compute(); int f01; int f02; int f03; int f04; int f05; int f06; int f07; int f08; int f09; int f10; int f11; int f12; int f13; int f14; int f15; int f16; static UnsafeAccessMustBeSimulated compute() { UnsafeAccessMustBeSimulated result = new UnsafeAccessMustBeSimulated(); /* * We are writing a random instance field, depending on the header size. But the object is * big enough so that the write is one of the fields. The unsafe write is converted to a * proper store field node because the offset is constant, so in the static analysis graph * there is no unsafe access node. */ Unsafe.getUnsafe().putInt(result, 32L, 1234); return result; } } enum EnumMustBeSimulated { V1(null), V2("Hello"), V3(new Object()); final Object value; EnumMustBeSimulated(Object value) { this.value = value; } Object getValue() { /* * Use an assertion, so that the static final field that stores the assertion status is * filled in the class initializer. We want to test that using assertions does not impact * the class initialization analysis. */ assert value != null; return value; } } class NativeMethodMustBeDelayed { static int i = compute(); static int compute() { try { nativeMethod(); } catch (LinkageError ignored) { /* Expected since the native method is not implemented. 
*/ } return 42; } static native void nativeMethod(); static void foo() { /* * Even when a class is initialized at run time, the check whether assertions are included * must be constant folded at image build time. Otherwise we have a performance problem. */ assert assertionOnlyCode(); } static boolean assertionOnlyCode() { AssertionOnlyClassMustBeUnreachable.reference(); return false; } } class AssertionOnlyClassMustBeUnreachable { static void reference() { } } /** * Cycle between this class and a helper class. */ class CycleMustBeSimulated { static { HelperClassMustBeSimulated.foo(); } static void foo() { } } class HelperClassMustBeSimulated { static { CycleMustBeSimulated.foo(); } static void foo() { } } /** Various reflection lookup methods are safe for execution at image build time. */ class ReflectionMustBeSimulated { static Class<?> c1; static Class<?> c2; static Method m1; static Field f2; static { try { Class<?> c1Local = Class.forName("com.oracle.svm.test.clinit.ForNameMustBeSimulated", true, ReflectionMustBeSimulated.class.getClassLoader()); c1 = c1Local; /** * Looking up a class that cannot be initialized at build time is allowed, as long as * `initialize` is `false`. */ Class<?> c2Local = Class.forName("com.oracle.svm.test.clinit.ForNameUninitializedMustBeDelayed", false, ReflectionMustBeSimulated.class.getClassLoader()); c2 = c2Local; /* * GR-51519: Calling getDeclaredMethod on the field c1 instead of the variable c1Local * would not work, the ReflectionPlugins do not see through simulated image heap * constants for the parameterTypes array yet. */ m1 = c1Local.getDeclaredMethod("foo", int.class); f2 = c2Local.getDeclaredField("field"); /* * Check that reflective class lookup and the elimination of the class initialization * check also works when the class name is not constant yet during bytecode parsing. 
*/ if (c1Local != Class.forName(forNameMustBeSimulated(), true, ReflectionMustBeSimulated.class.getClassLoader())) { throw new Error("wrong class"); } } catch (ReflectiveOperationException ex) { throw new Error(ex); } } private static String forNameMustBeSimulated() { return "com.oracle.svm.test.clinit.ForNameMustBeSimulated"; } } @SuppressWarnings("unused") class ForNameMustBeSimulated { static void foo(int arg) { } } class ForNameUninitializedMustBeDelayed { static { System.out.println("Delaying ForNameUninitializedMustBeDelayed"); } int field; } class DevirtualizedCallMustBeDelayed { static { System.out.println("Delaying DevirtualizedCallMustBeDelayed"); } static final Object value = 42; } class DevirtualizedCallSuperMustBeSimulated { Object foo() { return -1; } } class DevirtualizedCallSubMustBeSimulated extends DevirtualizedCallSuperMustBeSimulated { @Override Object foo() { return DevirtualizedCallMustBeDelayed.value; } } class DevirtualizedCallUsageMustBeDelayed { static final Object value = computeValue(); private static Object computeValue() { DevirtualizedCallSuperMustBeSimulated provider = createProvider(); /* * The static analysis can prove that DevirtualizedCallSubMustBeDelayed.foo is the only * callee and de-virtualize this call. So the original target method of the call site and * the actually invoked method are different - and the analysis that automatically * initializes classes must properly pick up this dependency. 
*/ return provider.foo(); } private static DevirtualizedCallSuperMustBeSimulated createProvider() { return new DevirtualizedCallSubMustBeSimulated(); } } class LargeAllocation1MustBeDelayed { static final Object value = computeValue(); private static Object computeValue() { return new Object[200_000]; } } class LargeAllocation2MustBeDelayed { static final Object value = computeValue(); private static Object computeValue() { return new int[1][200_000]; } } enum ComplexEnumMustBeSimulated { V1 { @Override int virtualMethod() { return 41; } }, V2 { @Override int virtualMethod() { return 42; } }; abstract int virtualMethod(); static final Map<String, ComplexEnumMustBeSimulated> lookup; static { lookup = new HashMap<>(); for (var v : values()) { lookup.put(v.name(), v); } } } class StaticFinalFieldFoldingMustBeSimulated { Object f1; Object f2; Object f3; StaticFinalFieldFoldingMustBeSimulated() { this.f1 = F1; this.f2 = F2; this.f3 = F3; } static final StaticFinalFieldFoldingMustBeSimulated before = new StaticFinalFieldFoldingMustBeSimulated(); /** * Field value is stored in the class file attribute, so it is available even before this * assignment. */ static final String F1 = "abc"; /** * Field is optimized by our {@link IsStaticFinalFieldInitializedNode static final field folding * feature}. */ static final Object F2 = "abc"; /** Just a regular field. 
*/ static final Object F3 = new String[]{"abc"}; static final StaticFinalFieldFoldingMustBeSimulated after = new StaticFinalFieldFoldingMustBeSimulated(); } class LambdaMustBeSimulated { private static final Predicate<String> IS_AUTOMATIC = s -> s.equals("Hello"); static boolean matches(List<String> l) { return l.stream().anyMatch(IS_AUTOMATIC); } } @SuppressWarnings("deprecation") class BoxingMustBeSimulated { static Integer i1 = 41; static Integer i2 = new Integer(42); static int sum = i1 + i2; static final Map<Class<?>, Object> defaultValues = new HashMap<>(); static { defaultValues.put(boolean.class, Boolean.FALSE); defaultValues.put(byte.class, (byte) 0); defaultValues.put(short.class, (short) 0); defaultValues.put(int.class, 0); defaultValues.put(long.class, 0L); defaultValues.put(char.class, '\0'); defaultValues.put(float.class, 0.0F); defaultValues.put(double.class, 0.0); } public static Object defaultValue(Class<?> clazz) { return defaultValues.get(clazz); } static Object S1; static Object O1; static Object O2; static { short[] shorts = {42, 43, 44, 45, 46, 47, 48}; S1 = new short[12]; System.arraycopy(shorts, 1, S1, 2, 5); System.arraycopy(S1, 3, S1, 5, 5); Object[] objects = {"42", null, "44", "45", null, "47", "48"}; O1 = Arrays.copyOf(objects, 3); O2 = Arrays.copyOfRange(objects, 3, 6, String[].class); } } class SingleByteFieldMustBeSimulated { static SingleByteFieldMustBeSimulated instance1 = new SingleByteFieldMustBeSimulated((byte) 42); static SingleByteFieldMustBeSimulated instance2 = new SingleByteFieldMustBeSimulated((byte) -42); byte b; SingleByteFieldMustBeSimulated(byte b) { this.b = b; } } class SynchronizedMustBeSimulated { static Vector<String> vector; static { /* * Using the normally disallowed "old" synchronized collection classes is the easiest way to * test what we want to test. 
*/ // Checkstyle: stop vector = new Vector<>(); // Checkstyle: resume for (int i = 0; i < 42; i++) { vector.add(String.valueOf(i)); } } } class SynchronizedMustBeDelayed { static { synchronizedMethod(); } /** * This method synchronizes on an object that exists before class initialization is started: the * class object itself. So we cannot determine at image build time if the class initializer * would ever finish execution at image run time. Another thread could hold the lock * indefinitely. */ static synchronized int synchronizedMethod() { return 42; } } class InitializationOrder { static final List<Class<?>> initializationOrder = Collections.synchronizedList(new ArrayList<>()); } interface Test1_I1 { default void defaultI1() { } int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test1_I1.class); return 42; } } interface Test1_I2 extends Test1_I1 { int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test1_I2.class); return 42; } } interface Test1_I3 extends Test1_I2 { default void defaultI3() { } int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test1_I3.class); return 42; } } interface Test1_I4 extends Test1_I3 { int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test1_I4.class); return 42; } } class Test1_A implements Test1_I4 { static { InitializationOrder.initializationOrder.add(Test1_A.class); } } interface Test2_I1 { default void defaultI1() { } int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test2_I1.class); return 42; } } interface Test2_I2 extends Test2_I1 { int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test2_I2.class); return 42; } } interface Test2_I3 extends Test2_I2 { default void defaultI3() { } int order = add(); static int add() { InitializationOrder.initializationOrder.add(Test2_I3.class); return 42; } } interface Test2_I4 extends Test2_I3 { int order = 
add(); static int add() { InitializationOrder.initializationOrder.add(Test2_I4.class); return 42; } } class TestClassInitializationFeature implements Feature { private static void checkClasses() { System.out.println("=== Checking initialization state of classes"); List<String> errors = new ArrayList<>(); for (Class<?> checkedClass : TestClassInitialization.checkedClasses) { boolean nameHasSimulated = checkedClass.getName().contains("MustBeSimulated"); boolean nameHasDelayed = checkedClass.getName().contains("MustBeDelayed"); if ((nameHasSimulated ? 1 : 0) + (nameHasDelayed ? 1 : 0) != 1) { errors.add(checkedClass.getName() + ": Wrongly named class: nameHasSimulated=" + nameHasSimulated + ", nameHasDelayed=" + nameHasDelayed); } else if (!Unsafe.getUnsafe().shouldBeInitialized(checkedClass)) { errors.add(checkedClass.getName() + ": Class already initialized at image build time"); } } if (!errors.isEmpty()) { throw new Error(errors.stream().collect(Collectors.joining(System.lineSeparator()))); } } @Override public void afterRegistration(AfterRegistrationAccess access) { /* We need to access the checkedClasses array both at image build time and run time. */ RuntimeClassInitialization.initializeAtBuildTime(TestClassInitialization.class); /* * Initialization of a class first triggers initialization of all superinterfaces that * declared default methods. */ InitializationOrder.initializationOrder.clear(); assertNotInitialized(Test1_I1.class, Test1_I2.class, Test1_I3.class, Test1_I4.class, Test1_A.class); RuntimeClassInitialization.initializeAtBuildTime(Test1_A.class); assertNotInitialized(Test1_I2.class, Test1_I4.class); assertInitialized(Test1_I1.class, Test1_I3.class, Test1_A.class); assertArraysEqual(new Object[]{Test1_I1.class, Test1_I3.class, Test1_A.class}, InitializationOrder.initializationOrder.toArray()); /* * Initialization of an interface does not trigger initialization of superinterfaces. 
* Regardless whether any of the involved interfaces declare default methods. */ InitializationOrder.initializationOrder.clear(); assertNotInitialized(Test2_I1.class, Test2_I2.class, Test2_I3.class, Test2_I4.class); RuntimeClassInitialization.initializeAtBuildTime(Test2_I4.class); assertNotInitialized(Test2_I1.class, Test2_I2.class, Test2_I3.class); assertInitialized(Test2_I4.class); assertArraysEqual(new Object[]{Test2_I4.class}, InitializationOrder.initializationOrder.toArray()); RuntimeClassInitialization.initializeAtBuildTime(Test2_I3.class); assertNotInitialized(Test2_I1.class, Test2_I2.class); assertInitialized(Test2_I3.class, Test2_I4.class); assertArraysEqual(new Object[]{Test2_I4.class, Test2_I3.class}, InitializationOrder.initializationOrder.toArray()); RuntimeClassInitialization.initializeAtBuildTime(Test2_I2.class); assertNotInitialized(Test2_I1.class); assertInitialized(Test2_I2.class, Test2_I3.class, Test2_I4.class); assertArraysEqual(new Object[]{Test2_I4.class, Test2_I3.class, Test2_I2.class}, InitializationOrder.initializationOrder.toArray()); } private static void assertNotInitialized(Class<?>... classes) { for (var clazz : classes) { if (!Unsafe.getUnsafe().shouldBeInitialized(clazz)) { throw new AssertionError("Already initialized: " + clazz); } } } private static void assertInitialized(Class<?>... 
classes) { for (var clazz : classes) { if (Unsafe.getUnsafe().shouldBeInitialized(clazz)) { throw new AssertionError("Not initialized: " + clazz); } } } private static void assertArraysEqual(Object[] expected, Object[] actual) { if (!Arrays.equals(expected, actual)) { throw new RuntimeException("expected " + Arrays.toString(expected) + " but found " + Arrays.toString(actual)); } } @Override public void beforeAnalysis(BeforeAnalysisAccess access) { checkClasses(); } @Override public void duringAnalysis(DuringAnalysisAccess access) { checkClasses(); } @Override public void afterAnalysis(AfterAnalysisAccess access) { if (access.isReachable(AssertionOnlyClassMustBeUnreachable.class)) { throw new Error("Assertion check was not constant folded for a class that is initialized at run time. " + "We assume here that the image is built with assertions disabled, which is the case for the gate check."); } } @Override public void beforeCompilation(BeforeCompilationAccess access) { checkClasses(); } @Override public void afterImageWrite(AfterImageWriteAccess access) { checkClasses(); } } public class TestClassInitialization { static final Class<?>[] checkedClasses = new Class<?>[]{ PureMustBeSimulated.class, NonPureMustBeDelayed.class, PureCallMustBeSimulated.class, InitializesNonPureMustBeDelayed.class, SystemPropReadMustBeDelayed.class, SystemPropWriteMustBeDelayed.class, StartsAThreadMustBeDelayed.class, CreatesAnExceptionMustBeDelayed.class, ThrowsAnExceptionUninitializedMustBeDelayed.class, PureInterfaceMustBeSimulated.class, PureSubclassMustBeDelayed.class, SuperClassMustBeDelayed.class, InterfaceNonPureMustBeDelayed.class, InterfaceNonPureDefaultMustBeDelayed.class, PureSubclassInheritsDelayedInterfaceMustBeSimulated.class, PureSubclassInheritsDelayedDefaultInterfaceMustBeDelayed.class, ImplicitExceptionInInitializerUninitializedMustBeDelayed.class, PureDependsOnImplicitExceptionUninitializedMustBeDelayed.class, StaticFieldHolderMustBeSimulated.class, 
StaticFieldModifer1MustBeDelayed.class, StaticFieldModifer2MustBeDelayed.class, RecursionInInitializerMustBeSimulated.class, UnsafeAccessMustBeSimulated.class, EnumMustBeSimulated.class, NativeMethodMustBeDelayed.class, CycleMustBeSimulated.class, HelperClassMustBeSimulated.class, ReflectionMustBeSimulated.class, ForNameMustBeSimulated.class, ForNameUninitializedMustBeDelayed.class, DevirtualizedCallMustBeDelayed.class, DevirtualizedCallSuperMustBeSimulated.class, DevirtualizedCallSubMustBeSimulated.class, DevirtualizedCallUsageMustBeDelayed.class, LargeAllocation1MustBeDelayed.class, LargeAllocation2MustBeDelayed.class, ComplexEnumMustBeSimulated.class, StaticFinalFieldFoldingMustBeSimulated.class, LambdaMustBeSimulated.class, BoxingMustBeSimulated.class, SingleByteFieldMustBeSimulated.class, SynchronizedMustBeSimulated.class, SynchronizedMustBeDelayed.class, }; static int pure() { return transitivelyPure() + 42; } private static int transitivelyPure() { return 42; } /* * Since {@link Function} is a core JDK type that is always marked as * "initialize at build time", it is allowed to have a lambda for it in the image heap. 
*/ static Function<String, String> buildTimeLambda = TestClassInitialization::duplicate; static String duplicate(String s) { return s + s; } public static void main(String[] args) { for (var checkedClass : checkedClasses) { boolean nameHasSimulated = checkedClass.getName().contains("MustBeSimulated"); boolean nameHasDelayed = checkedClass.getName().contains("MustBeDelayed"); boolean initialized = !Unsafe.getUnsafe().shouldBeInitialized(checkedClass); if (nameHasDelayed == initialized) { throw new RuntimeException("Class " + checkedClass.getName() + ": nameHasSimulated=" + nameHasSimulated + ", nameHasDelayed=" + nameHasDelayed + ", initialized=" + initialized); } } assertTrue("123123".equals(buildTimeLambda.apply("123"))); assertSame(42, PureMustBeSimulated.v); assertSame(84, PureCallMustBeSimulated.v); assertSame(42, InitializesPureMustBeDelayed.v); assertSame(1, NonPureMustBeDelayed.v); assertSame(1, NonPureAccessedFinal.v); assertSame(1, InitializesNonPureMustBeDelayed.v); assertSame(1, SystemPropReadMustBeDelayed.v); assertSame(1, SystemPropWriteMustBeDelayed.v); assertSame(1, StartsAThreadMustBeDelayed.v); assertSame(1, PureSubclassMustBeDelayed.v); assertSame(1, PureSubclassInheritsDelayedInterfaceMustBeSimulated.v); assertSame(1, PureSubclassInheritsDelayedDefaultInterfaceMustBeDelayed.v); assertSame(1, InterfaceNonPureMustBeDelayed.v); try { sink(ThrowsAnExceptionUninitializedMustBeDelayed.v); throw new RuntimeException("should not reach here"); } catch (ExceptionInInitializerError e) { assertSame("should fire at runtime", e.getCause().getMessage()); } assertSame("should fire at runtime", CreatesAnExceptionMustBeDelayed.e.getMessage()); try { sink(ImplicitExceptionInInitializerUninitializedMustBeDelayed.res); throw new RuntimeException("should not reach here"); } catch (ExceptionInInitializerError e) { assertSame(ArithmeticException.class, e.getCause().getClass()); } try { sink(PureDependsOnImplicitExceptionUninitializedMustBeDelayed.a); throw new 
RuntimeException("should not reach here"); } catch (NoClassDefFoundError e) { /* Expected. */ } assertSame(111, StaticFieldHolderMustBeSimulated.a); StaticFieldModifer1MustBeDelayed.triggerInitialization(); assertSame(222, StaticFieldHolderMustBeSimulated.a); StaticFieldModifer2MustBeDelayed.triggerInitialization(); assertSame(333, StaticFieldHolderMustBeSimulated.a); assertSame(20100, RecursionInInitializerMustBeSimulated.i); UnsafeAccessMustBeSimulated value = UnsafeAccessMustBeSimulated.value; assertSame(1234, value.f01 + value.f02 + value.f03 + value.f04 + value.f05 + value.f06 + value.f07 + value.f08 + value.f09 + value.f10 + value.f11 + value.f12 + value.f13 + value.f14 + value.f15 + value.f16); EnumMustBeSimulated[] values = EnumMustBeSimulated.values(); assertSame(null, values[0].getValue()); assertSame("Hello", values[1].getValue()); assertSame(Object.class, values[2].getValue().getClass()); assertSame(EnumMustBeSimulated.V1, stringToEnum("v1")); assertSame(42, NativeMethodMustBeDelayed.i); NativeMethodMustBeDelayed.foo(); CycleMustBeSimulated.foo(); assertSame(ForNameMustBeSimulated.class, ReflectionMustBeSimulated.c1); assertSame(ForNameUninitializedMustBeDelayed.class, ReflectionMustBeSimulated.c2); assertSame("foo", ReflectionMustBeSimulated.m1.getName()); assertSame("field", ReflectionMustBeSimulated.f2.getName()); assertSame(42, DevirtualizedCallUsageMustBeDelayed.value); assertSame(200_000, ((Object[]) LargeAllocation1MustBeDelayed.value).length); assertSame(1, ((int[][]) LargeAllocation2MustBeDelayed.value).length); assertSame(200_000, ((int[][]) LargeAllocation2MustBeDelayed.value)[0].length); assertSame(ComplexEnumMustBeSimulated.V1, ComplexEnumMustBeSimulated.lookup.get("V1")); assertSame(42, ComplexEnumMustBeSimulated.lookup.get("V2").virtualMethod()); assertSame("abc", StaticFinalFieldFoldingMustBeSimulated.before.f1); assertSame(null, StaticFinalFieldFoldingMustBeSimulated.before.f2); assertSame(null, 
StaticFinalFieldFoldingMustBeSimulated.before.f3); assertSame("abc", StaticFinalFieldFoldingMustBeSimulated.after.f1); assertSame("abc", StaticFinalFieldFoldingMustBeSimulated.after.f2); assertSame(1, ((Object[]) StaticFinalFieldFoldingMustBeSimulated.after.f3).length); assertSame(true, LambdaMustBeSimulated.matches(List.of("1", "2", "3", "Hello", "4"))); assertSame(false, LambdaMustBeSimulated.matches(List.of("1", "2", "3", "4"))); assertSame(83, BoxingMustBeSimulated.sum); assertSame(Character.class, BoxingMustBeSimulated.defaultValue(char.class).getClass()); assertSame(Short.class, BoxingMustBeSimulated.defaultValue(short.class).getClass()); assertSame(Float.class, BoxingMustBeSimulated.defaultValue(float.class).getClass()); assertTrue(Arrays.equals((short[]) BoxingMustBeSimulated.S1, new short[]{0, 0, 43, 44, 45, 44, 45, 46, 47, 0, 0, 0})); assertTrue(Arrays.equals((Object[]) BoxingMustBeSimulated.O1, new Object[]{"42", null, "44"})); assertTrue(Arrays.equals((Object[]) BoxingMustBeSimulated.O2, new String[]{"45", null, "47"})); /* * The unsafe field offset lookup is constant folded at image build time, which also * registers the field as unsafe accessed. */ long bOffset = Unsafe.getUnsafe().objectFieldOffset(SingleByteFieldMustBeSimulated.class, "b"); assertTrue(bOffset % 4 == 0); /* * Check that for sub-int values, the padding after the value is not touched by the image * heap writer. 
*/ assertSame(42, readRawByte(SingleByteFieldMustBeSimulated.instance1, bOffset + 0)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance1, bOffset + 1)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance1, bOffset + 2)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance1, bOffset + 3)); assertSame(-42, readRawByte(SingleByteFieldMustBeSimulated.instance2, bOffset + 0)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance2, bOffset + 1)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance2, bOffset + 2)); assertSame(0, readRawByte(SingleByteFieldMustBeSimulated.instance2, bOffset + 3)); assertSame(42, SynchronizedMustBeSimulated.vector.size()); assertSame(42, SynchronizedMustBeDelayed.synchronizedMethod()); for (var checkedClass : checkedClasses) { boolean initialized = !Unsafe.getUnsafe().shouldBeInitialized(checkedClass); boolean expectedUninitialized = checkedClass.getName().contains("Uninitialized"); if (initialized == expectedUninitialized) { throw new RuntimeException("Class " + checkedClass.getName() + ": initialized=" + initialized + ", expectedUninitialized=" + expectedUninitialized); } } } @NeverInline("prevent constant folding, we read the raw memory after the last field") static int readRawByte(Object o, long offset) { return Unsafe.getUnsafe().getByte(o, offset); } private static EnumMustBeSimulated stringToEnum(String name) { if (EnumMustBeSimulated.V1.name().equalsIgnoreCase(name)) { return EnumMustBeSimulated.V1; } else { return EnumMustBeSimulated.V2; } } private static void assertTrue(boolean condition) { if (!condition) { throw new RuntimeException("condition not true"); } } private static void assertSame(long expected, long actual) { if (expected != actual) { throw new RuntimeException("expected " + expected + " but found " + actual); } } private static void assertSame(Object expected, Object actual) { if (expected != actual) { throw new RuntimeException("expected " + 
expected + " but found " + actual); } } private static void sink(@SuppressWarnings("unused") Object o) { } }
oracle/nosql
34,959
kvmain/src/main/java/oracle/kv/impl/api/table/ArrayValueImpl.java
/*- * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package oracle.kv.impl.api.table; import static oracle.kv.impl.api.table.TableJsonUtils.jsonParserGetDecimalValue; import java.io.DataOutput; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.math.BigDecimal; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import oracle.kv.impl.api.table.ValueSerializer.ArrayValueSerializer; import oracle.kv.impl.api.table.ValueSerializer.FieldValueSerializer; import oracle.kv.impl.util.SizeOf; import oracle.kv.table.ArrayDef; import oracle.kv.table.ArrayValue; import oracle.kv.table.FieldDef; import oracle.kv.table.FieldValue; import oracle.kv.table.MapValue; import oracle.kv.table.RecordValue; import com.fasterxml.jackson.core.JsonLocation; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonParser.NumberType; import com.fasterxml.jackson.core.JsonToken; /** * ArrayValueImpl implements the ArrayValue interface to hold an object of * type ArrayDef. */ public class ArrayValueImpl extends ComplexValueImpl implements ArrayValue, ArrayValueSerializer { private static final long serialVersionUID = 1L; private final ArrayList<FieldValueImpl> array = new ArrayList<>(); /* * Support for homogenous arrays of valid scalars in wildcard arrays, * which are arrays of ANY, JSON, ANY_ATOMIC, or ANY_JSON_ATOMIC. 
* * This internal feature exists so that storage and serialization can * be optimized for homogenous arrays of scalars. Specifically, we don't * want to store an extra byte per element to specify the type of the * element. We don't track homogeneity for non scalara because the space * savings are smaller, and the cpu cost of tracking is higher. * * Rules: * 1. If homogeneousType is non-null, this is an array that may contain * mixed elements, but which currently contains scalars only, all * having homogeneousType as their type. Otherwise, this is either a * "typed" array (it's elementDef is not a wildcard) or a non- * homogeneous wildcard array. * 2. Public APIs will always see this as an array of wildcard (e.g. JSON) * 3. There are internal APIs to access the homogenous type. These are * used in this class and in other classes that need to know (for now, * FieldValueSerialization only). * * Empty arrays of wildcards always start with homogeneousType == null. * The following are the valid transitions for arrays of wildcard: * * 1. empty array -> homogeneous array * Empty arrays of wildcards always start with homogeneousType == null. * On first insertion, if the type of inserted element is scalar, it is * stored in this.homogeneousType. * * 2. homogeneous array -> non-homogeneous array. * If an element whose type is not the same as this.homogeneousType is * inserted, this.homogeneousType is set to null, making the array a * non-homogeneous one. * * These transitions are handled in trackHomogenousType(). */ private FieldDefImpl homogeneousType; /* The flag is set when the array is constructed by an ArrayConstrIter, * whose theIsConditional flag is true. It is used/needed when a query has * array_collect(expr) with generic (not index-based) group-by and the input * expr of the array_collect() returns more than 1 value. Generic group-by * means that the query execution plan at the RNs will have a GroupIter over * an SFWIter. 
The input expr of the array_collect() is computed by the
     * SFWIter. But if this expr returns more than one item, we have to put
     * these values into a conditionally-constructed array in order to
     * construct the record returned by the SFWIter (because we don't have a
     * class that represents a sequence of values). Then, the GroupIter needs
     * to know if an array it receives as the input value for array_collect()
     * is an "original" array (e.g., an array that existed in a table row),
     * or an "artificial" array that was constructed by the SFWIter. In the
     * former case, the "original" array is included as-is into the array
     * constructed by array_collect(). In the latter case, the "artificial"
     * array is unboxed and its elements are inserted in the array
     * constructed by array_collect(). */
    private transient boolean isConditionallyConstructed;

    /** Creates an empty array value conforming to the given array definition. */
    ArrayValueImpl(ArrayDef def) {
        super(def);
    }

    /* Tag used by the internal value-serialization framework. */
    @Override
    protected ValueType getValueType() {
        return ValueType.ARRAY_VALUE;
    }

    @Override
    public void writeFastExternal(DataOutput out, short serialVersion) {
        /*
         * This class is only used during query return and is not serialized
         * with the table metadata otherwise. So there is no need to support
         * FastExternal.
 */
        fastExternalNotSupported();
    }

    /*
     * Public api methods from Object and FieldValue
     */

    @Override
    public ArrayValueImpl clone() {
        ArrayValueImpl newArray = new ArrayValueImpl(getDefinition());
        for (FieldValue val : array) {
            newArray.add(val.clone());
        }
        /*
         * Copy the tracking state directly; add() alone may not reproduce it
         * because it tracks after insertion (see trackHomogeneousType).
         */
        newArray.homogeneousType = homogeneousType;
        return newArray;
    }

    @Override
    public long sizeof() {
        long size = super.sizeof();
        size += (2 * SizeOf.OBJECT_REF_OVERHEAD +
                 SizeOf.ARRAYLIST_OVERHEAD +
                 SizeOf.objectArraySize(array.size()));
        for (FieldValue elem : array) {
            size += ((FieldValueImpl)elem).sizeof();
        }
        return size;
    }

    @Override
    public int hashCode() {
        /*
         * Order-independent sum over elements plus the size. Still consistent
         * with equals: equal arrays have pairwise-equal elements, hence equal
         * sums.
         */
        int code = size();
        for (FieldValue val : array) {
            code += val.hashCode();
        }
        return code;
    }

    @Override
    public boolean equals(Object other) {
        if (other instanceof ArrayValueImpl) {
            ArrayValueImpl otherValue = (ArrayValueImpl) other;
            /* maybe avoid some work */
            if (this == otherValue) {
                return true;
            }
            /*
             * detailed comparison
             *
             * NOTE(review): the homogeneousType clause below is asymmetric --
             * a null homogeneousType on this side matches anything on the
             * other side, but not vice versa, so a.equals(b) can differ from
             * b.equals(a) when only one side has tracking state set. Confirm
             * whether this derived optimization state should participate in
             * equals at all.
             */
            if (size() == otherValue.size() &&
                getDefinition().equals(otherValue.getDefinition()) &&
                (homogeneousType == null ||
                 homogeneousType.equals(otherValue.getHomogeneousType()))) {
                for (int i = 0; i < size(); i++) {
                    if (!get(i).equals(otherValue.get(i))) {
                        return false;
                    }
                }
                return true;
            }
        }
        return false;
    }

    /**
     * FieldDef must match.
     *
     * Compare field values in array order. Return as soon as there is a
     * difference. If this object has a field the other does not, return &gt;
     * 0. If this object is missing a field the other has, return &lt; 0.
*/ @Override public int compareTo(FieldValue other) { if (other instanceof ArrayValueImpl) { ArrayValueImpl otherImpl = (ArrayValueImpl) other; if (!getDefinition().equals(otherImpl.getDefinition())) { throw new IllegalArgumentException ("Cannot compare ArrayValues with different definitions"); } for (int i = 0; i < size(); i++) { FieldValueImpl val = get(i); if (otherImpl.size() < i + 1) { return 1; } int ret = val.compareTo(otherImpl.get(i)); if (ret != 0) { return ret; } } /* they must be equal */ return 0; } throw new ClassCastException("Object is not an ArrayValue"); } @Override public FieldDef.Type getType() { return FieldDef.Type.ARRAY; } @Override public boolean isArray() { return true; } @Override public ArrayValue asArray() { return this; } /* * Public api methods from ArrayValue */ @Override public ArrayDefImpl getDefinition() { return (ArrayDefImpl)fieldDef; } @Override public FieldValueImpl get(int index) { return array.get(index); } @Override public FieldValueImpl getElement(int index) { return array.get(index); } @Override public int size() { return array.size(); } @Override public List<FieldValue> toList() { return Collections.unmodifiableList(array); } @Override public ArrayValue add(FieldValue value) { value = validate(value, getElementDef()); array.add((FieldValueImpl)value); trackHomogeneousType(value); return this; } @Override public ArrayValue add(int index, FieldValue value) { value = validate(value, getElementDef()); array.add(index, (FieldValueImpl)value); trackHomogeneousType(value); return this; } public void addAll(ArrayValueImpl arr) { array.addAll(arr.getArrayInternal()); } @Override public ArrayValue set(int index, FieldValue value) { value = validate(value, getElementDef()); array.set(index, (FieldValueImpl)value); trackHomogeneousType(value); return this; } /** * Integer */ @Override public ArrayValue add(int value) { addScalar(getElementDef().createInteger(value)); return this; } @Override public ArrayValue add(int values[]) { 
FieldDefImpl edef = getElementDef(); for (int i : values) { addScalar(edef.createInteger(i)); } return this; } @Override public ArrayValue add(int index, int value) { addScalar(index, getElementDef().createInteger(value)); return this; } @Override public ArrayValue set(int index, int value) { setScalar(index, getElementDef().createInteger(value)); return this; } /** * Long */ @Override public ArrayValue add(long value) { addScalar(getElementDef().createLong(value)); return this; } @Override public ArrayValue add(long values[]) { FieldDef edef = getElementDef(); for (long l : values) { addScalar(edef.createLong(l)); } return this; } @Override public ArrayValue add(int index, long value) { addScalar(index, getElementDef().createLong(value)); return this; } @Override public ArrayValue set(int index, long value) { setScalar(index, getElementDef().createLong(value)); return this; } /** * String */ @Override public ArrayValue add(String value) { addScalar(getElementDef().createString(value)); return this; } @Override public ArrayValue add(String values[]) { FieldDef edef = getElementDef(); for (String s : values) { addScalar(edef.createString(s)); } return this; } @Override public ArrayValue add(int index, String value) { addScalar(index, getElementDef().createString(value)); return this; } @Override public ArrayValue set(int index, String value) { setScalar(index, getElementDef().createString(value)); return this; } /** * Double */ @Override public ArrayValue add(double value) { addScalar(getElementDef().createDouble(value)); return this; } @Override public ArrayValue add(double values[]) { FieldDef edef = getElementDef(); for (double d : values) { addScalar(edef.createDouble(d)); } return this; } @Override public ArrayValue add(int index, double value) { addScalar(index, getElementDef().createDouble(value)); return this; } @Override public ArrayValue set(int index, double value) { setScalar(index, getElementDef().createDouble(value)); return this; } /** * Float */ 
@Override public ArrayValue add(float value) { addScalar(getElementDef().createFloat(value)); return this; } @Override public ArrayValue add(float values[]) { FieldDefImpl edef = getElementDef(); for (float d : values) { addScalar(edef.createFloat(d)); } return this; } @Override public ArrayValue add(int index, float value) { addScalar(index, getElementDef().createFloat(value)); return this; } @Override public ArrayValue set(int index, float value) { setScalar(index, getElementDef().createFloat(value)); return this; } /* * BigDecimal */ @Override public ArrayValue addNumber(int value) { addScalar(getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(int values[]) { FieldDef def = getElementDef(); for (int val : values) { addScalar(def.createNumber(val)); } return this; } @Override public ArrayValue addNumber(int index, int value) { addScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue setNumber(int index, int value) { setScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(long value) { addScalar(getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(long values[]) { FieldDef def = getElementDef(); for (long val : values) { addScalar(def.createNumber(val)); } return this; } @Override public ArrayValue addNumber(int index, long value) { addScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue setNumber(int index, long value) { setScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(float value) { addScalar(getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(float values[]) { FieldDef def = getElementDef(); for (float val : values) { addScalar(def.createNumber(val)); } return this; } @Override public ArrayValue addNumber(int index, float value) { addScalar(index, 
getElementDef().createNumber(value)); return this; } @Override public ArrayValue setNumber(int index, float value) { setScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(double value) { addScalar(getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(double values[]) { FieldDef def = getElementDef(); for (double val : values) { addScalar(def.createNumber(val)); } return this; } @Override public ArrayValue addNumber(int index, double value) { addScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue setNumber(int index, double value) { setScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(BigDecimal value) { addScalar(getElementDef().createNumber(value)); return this; } @Override public ArrayValue addNumber(BigDecimal values[]) { FieldDef def = getElementDef(); for (BigDecimal bd : values) { addScalar(def.createNumber(bd)); } return this; } @Override public ArrayValue addNumber(int index, BigDecimal value) { addScalar(index, getElementDef().createNumber(value)); return this; } @Override public ArrayValue setNumber(int index, BigDecimal value) { setScalar(index, getElementDef().createNumber(value)); return this; } /** * Boolean */ @Override public ArrayValue add(boolean value) { addScalar(getElementDef().createBoolean(value)); return this; } @Override public ArrayValue add(boolean values[]) { FieldDef edef = getElementDef(); for (boolean b : values) { addScalar(edef.createBoolean(b)); } return this; } @Override public ArrayValue add(int index, boolean value) { addScalar(index, getElementDef().createBoolean(value)); return this; } @Override public ArrayValue set(int index, boolean value) { setScalar(index, getElementDef().createBoolean(value)); return this; } /** * Binary */ @Override public ArrayValue add(byte[] value) { addScalar(getElementDef().createBinary(value)); return this; } 
@Override public ArrayValue add(byte[] values[]) { FieldDef edef = getElementDef(); for (byte[] b : values) { addScalar(edef.createBinary(b)); } return this; } @Override public ArrayValue add(int index, byte[] value) { addScalar(index, getElementDef().createBinary(value)); return this; } @Override public ArrayValue set(int index, byte[] value) { setScalar(index, getElementDef().createBinary(value)); return this; } /** * FixedBinary */ @Override public ArrayValue addFixed(byte[] value) { addScalar(getElementDef().createFixedBinary(value)); return this; } @Override public ArrayValue addFixed(byte[] values[]) { FieldDef edef = getElementDef(); for (byte[] b : values) { addScalar(edef.createFixedBinary(b)); } return this; } @Override public ArrayValue addFixed(int index, byte[] value) { addScalar(index, getElementDef().createFixedBinary(value)); return this; } @Override public ArrayValue setFixed(int index, byte[] value) { setScalar(index, getElementDef().createFixedBinary(value)); return this; } /** * Enum */ @Override public ArrayValue addEnum(String value) { addScalar(getElementDef().createEnum(value)); return this; } @Override public ArrayValue addEnum(String values[]) { FieldDef edef = getElementDef(); for (String s : values) { addScalar(edef.createEnum(s)); } return this; } @Override public ArrayValue addEnum(int index, String value) { addScalar(index, getElementDef().createEnum(value)); return this; } @Override public ArrayValue setEnum(int index, String value) { setScalar(index, getElementDef().createEnum(value)); return this; } /** * Timestamp */ @Override public ArrayValue add(Timestamp value) { addScalar(getElementDef().createTimestamp(value)); return this; } @Override public ArrayValue add(Timestamp values[]) { FieldDef def = getElementDef(); for (Timestamp v : values) { addScalar(def.createTimestamp(v)); } return this; } @Override public ArrayValue add(int index, Timestamp value) { addScalar(index, getElementDef().createTimestamp(value)); return this; } 
@Override
    public ArrayValue set(int index, Timestamp value) {
        /*
         * Bug fix: this previously called addScalar(index, ...), which
         * INSERTS at the index and shifts subsequent elements right. Every
         * other set(int, ...) overload replaces in place via setScalar; the
         * Timestamp overload was a copy/paste of add(int, Timestamp).
         */
        setScalar(index, getElementDef().createTimestamp(value));
        return this;
    }

    /**
     * JSON Null
     */
    @Override
    public ArrayValue addJsonNull() {
        addScalar(getElementDef().createJsonNull());
        return this;
    }

    @Override
    public ArrayValue addJsonNull(int index) {
        addScalar(index, getElementDef().createJsonNull());
        return this;
    }

    @Override
    public ArrayValue setJsonNull(int index) {
        setScalar(index, getElementDef().createJsonNull());
        return this;
    }

    /*
     * Record
     *
     * Complex elements clear the homogeneous-type tracking since only
     * scalars are tracked (see the comment on the homogeneousType field).
     */
    @Override
    public RecordValue setRecord(int index) {
        RecordValueImpl val = getElementDef().createRecord();
        array.set(index, val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public RecordValueImpl addRecord() {
        RecordValueImpl val = getElementDef().createRecord();
        array.add(val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public RecordValue addRecord(int index) {
        RecordValueImpl val = getElementDef().createRecord();
        array.add(index, val);
        clearHomogeneousType();
        return val;
    }

    /*
     * Map
     */
    @Override
    public MapValue setMap(int index) {
        MapValueImpl val = getElementDef().createMap();
        array.set(index, val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public MapValueImpl addMap() {
        MapValueImpl val = getElementDef().createMap();
        array.add(val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public MapValue addMap(int index) {
        MapValueImpl val = getElementDef().createMap();
        array.add(index, val);
        clearHomogeneousType();
        return val;
    }

    /*
     * Array
     */
    @Override
    public ArrayValue setArray(int index) {
        ArrayValueImpl val = getElementDef().createArray();
        array.set(index, val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public ArrayValueImpl addArray() {
        ArrayValueImpl val = getElementDef().createArray();
        array.add(val);
        clearHomogeneousType();
        return val;
    }

    @Override
    public ArrayValue addArray(int index) {
        ArrayValueImpl val = getElementDef().createArray();
        array.add(index, val);
        clearHomogeneousType();
        return val;
    }

    /*
     * JSON
     */
    @Override
    public
ArrayValueImpl addJson(String jsonInput) { Reader reader = new StringReader(jsonInput); try { return addJson(reader); } finally { try { reader.close(); } catch (IOException ioe) {} } } @Override public ArrayValueImpl addJson(Reader jsonReader) { add(JsonDefImpl.createFromReader(jsonReader)); return this; } @Override public ArrayValueImpl addJson(int index, String jsonInput) { Reader reader = new StringReader(jsonInput); try { return addJson(index, reader); } finally { try { reader.close(); } catch (IOException ioe) {} } } @Override public ArrayValueImpl addJson(int index, Reader jsonReader) { add(index, JsonDefImpl.createFromReader(jsonReader)); return this; } @Override public ArrayValueImpl setJson(int index, String jsonInput) { Reader reader = new StringReader(jsonInput); try { return setJson(index, reader); } finally { try { reader.close(); } catch (IOException ioe) {} } } @Override public ArrayValueImpl setJson(int index, Reader jsonReader) { set(index, JsonDefImpl.createFromReader(jsonReader)); return this; } /* * Methods from ComplexValueImpl */ /** * Parse a JSON array and put the extracted values into "this" array. */ @Override public void addJsonFields( JsonParser jp, String fieldName, boolean exact, boolean addMissingFields) { try { FieldDef element = getElementDef(); JsonToken t = jp.currentToken(); JsonLocation location = jp.currentLocation(); if (t != JsonToken.START_ARRAY) { jsonParseException(("Expected [ token to start array, instead " + "found " + t), location); } while ((t = jp.nextToken()) != JsonToken.END_ARRAY) { if (t == null || t == JsonToken.END_OBJECT) { jsonParseException("Did not find end of array", location); } /* * Handle null. 
*/ if (jp.getCurrentToken() == JsonToken.VALUE_NULL && !element.isJson()) { throw new IllegalArgumentException ("Invalid null value in JSON input for array"); } switch (element.getType()) { case INTEGER: checkNumberType(null, NumberType.INT, jp); add(jp.getIntValue()); break; case LONG: checkNumberType(null, NumberType.LONG, jp); add(jp.getLongValue()); break; case DOUBLE: checkNumberType(null, NumberType.DOUBLE, jp); add(jp.getDoubleValue()); break; case FLOAT: checkNumberType(null, NumberType.FLOAT, jp); add(jp.getFloatValue()); break; case NUMBER: checkNumberType(null, NumberType.BIG_DECIMAL, jp); addNumber(jsonParserGetDecimalValue(jp)); break; case STRING: add(jp.getText()); break; case BINARY: add(jp.getBinaryValue()); break; case FIXED_BINARY: addFixed(jp.getBinaryValue()); break; case BOOLEAN: add(jp.getBooleanValue()); break; case TIMESTAMP: add(element.asTimestamp().fromString(jp.getText())); break; case ARRAY: ArrayValueImpl array1 = addArray(); array1.addJsonFields(jp, null, exact, addMissingFields); break; case MAP: MapValueImpl map = addMap(); map.addJsonFields(jp, null, exact, addMissingFields); break; case RECORD: RecordValueImpl record = addRecord(); record.addJsonFields(jp, null, exact, addMissingFields); break; case ENUM: addEnum(jp.getText()); break; case JSON: case ANY_JSON_ATOMIC: array.add((FieldValueImpl)JsonDefImpl.createFromJson(jp, false)); break; case ANY: case ANY_ATOMIC: case ANY_RECORD: case EMPTY: case GEOMETRY: case POINT: case JSON_INT_MRCOUNTER: case JSON_LONG_MRCOUNTER: case JSON_NUM_MRCOUNTER: throw new IllegalStateException( "An array type cannot have " + element.getType() + " as its element type"); } } } catch (IOException ioe) { throw new IllegalArgumentException (("Failed to parse JSON input: " + ioe.getMessage()), ioe); } catch (RuntimeException re) { if (re instanceof IllegalArgumentException) { throw re; } throw new IllegalArgumentException (("Failed to parse JSON input: " + re.toString()), re); } } /* * FieldValueImpl 
internal api methods */ /** * Increment the value of the array element, not the array. There * can only be one element in this array. */ @Override public FieldValueImpl getNextValue() { if (size() != 1) { throw new IllegalArgumentException ("Array values used in ranges must contain only one element"); } ArrayValueImpl newArray = new ArrayValueImpl(getDefinition()); FieldValueImpl fvi = get(0).getNextValue(); newArray.add(fvi); return newArray; } @Override public FieldValueImpl getMinimumValue() { if (size() != 1) { throw new IllegalArgumentException ("Array values used in ranges must contain only one element"); } ArrayValueImpl newArray = new ArrayValueImpl(getDefinition()); FieldValueImpl fvi = get(0).getMinimumValue(); newArray.add(fvi); return newArray; } @Override public void toStringBuilder(StringBuilder sb, DisplayFormatter formatter) { if (formatter == null) { throw new IllegalArgumentException( "DisplayFormatter must be non-null"); } sb.append('['); for (int i = 0; i < array.size(); i++) { if (i > 0) { formatter.comma(sb); } FieldValueImpl value = array.get(i); value.toStringBuilder(sb, formatter); } sb.append(']'); } @SuppressWarnings("unchecked") static ArrayValueImpl fromJavaObjectValue(FieldDef def, Object o) { Iterable<Object> coll = null; if (o instanceof Iterable<?>) { coll = (Iterable<Object>) o; } else { coll = Arrays.asList((Object[]) o); } ArrayValueImpl newArray = (ArrayValueImpl)def.createArray(); for (Object value : coll) { newArray.add(FieldValueImpl.fromJavaObjectValue( newArray.getElementDef(), value)); } return newArray; } /* * Local methods */ public void clear() { array.clear(); } public void remove(int pos) { array.remove(pos); } public boolean isConditionallyConstructed() { return isConditionallyConstructed; } public void setConditionallyConstructed(boolean v) { isConditionallyConstructed = v; } /* * These next 3 exist to consolidate valid insertions. 
*/ private ArrayValue addScalar(FieldValue value) { assert ((FieldDefImpl)value.getDefinition()).isSubtype(getElementDef()); /* turn float to double */ if (value.isFloat() && getElementDef().isJson()) { value = FieldDefImpl.Constants.doubleDef.createDouble( value.asFloat().get()); } trackHomogeneousType(value); array.add((FieldValueImpl)value); return this; } private ArrayValue addScalar(int index, FieldValue value) { assert ((FieldDefImpl)value.getDefinition()).isSubtype(getElementDef()); /* turn float to double */ if (value.isFloat() && getElementDef().isJson()) { value = FieldDefImpl.Constants.doubleDef.createDouble( value.asFloat().get()); } trackHomogeneousType(value); array.add(index, (FieldValueImpl)value); return this; } private ArrayValue setScalar(int index, FieldValue value) { assert ((FieldDefImpl)value.getDefinition()).isSubtype(getElementDef()); /* turn float to double */ if (value.isFloat() && getElementDef().isJson()) { value = FieldDefImpl.Constants.doubleDef.createDouble( value.asFloat().get()); } trackHomogeneousType(value); array.set(index, (FieldValueImpl)value); return this; } public List<FieldValueImpl> getArrayInternal() { return array; } public FieldDefImpl getElementDef() { return ((ArrayDefImpl)fieldDef).getElement(); } FieldDefImpl getHomogeneousType() { return homogeneousType; } void setHomogeneousType(FieldDefImpl def) { homogeneousType = def; } boolean isHomogeneous() { return homogeneousType != null; } public void addInternal(FieldValueImpl value) { array.add(value); } /** * This is used by index deserialization. The format for enums is an * integer. */ ArrayValue addEnum(int value) { add(((EnumDefImpl)getElementDef()).createEnum(value)); return this; } /** * This method tracks the type of the elements in the array handling * transitions to/from wildcard types. 
 */
    private void trackHomogeneousType(FieldValue value) {

        FieldDefImpl elemDef = getElementDef();

        /* Only wildcard element types (ANY, JSON, ...) track homogeneity. */
        if (!elemDef.isWildcard()) {
            return;
        }

        FieldDefImpl valDef = (FieldDefImpl)value.getDefinition();

        /*
         * NOTE(review): the size() == 0 branch assumes this method is called
         * BEFORE the new element is appended (as addScalar/setScalar do);
         * add(FieldValue) calls it after appending, so that path never takes
         * this branch for the first element -- confirm this is intentional.
         */
        if (size() == 0) {
            /*
             * transition from empty wildcard array to homogenous wildcard
             * array.
             */
            assert(homogeneousType == null);
            if (valDef.isAtomic() && !valDef.isWildcard()) {
                homogeneousType = valDef;
            }
        } else if (homogeneousType != null &&
                   homogeneousType.getType() != valDef.getType()) {
            /* transition from homogenous wildcard to heterogenous wildcard */
            homogeneousType = null;
        }
    }

    /* Forces the non-homogeneous state; used when complex values are added. */
    private void clearHomogeneousType() {
        homogeneousType = null;
    }

    @Override
    public ArrayValueSerializer asArrayValueSerializer() {
        return this;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Iterator<FieldValueSerializer> iterator() {
        /*
         * Unchecked cast over the unmodifiable element view; presumably
         * every element type implements FieldValueSerializer -- TODO confirm
         * against FieldValueImpl's declared interfaces.
         */
        final List<?> values = toList();
        return (Iterator<FieldValueSerializer>)values.iterator();
    }
}
apache/iceberg
35,151
core/src/test/java/org/apache/iceberg/TestReplacePartitions.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg; import static org.apache.iceberg.util.SnapshotUtil.latestSnapshot; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assumptions.assumeThat; import java.io.IOException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.iceberg.ManifestEntry.Status; import org.apache.iceberg.exceptions.ValidationException; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public class TestReplacePartitions extends TestBase { static final DataFile FILE_E = DataFiles.builder(SPEC) .withPath("/path/to/data-e.parquet") .withFileSizeInBytes(0) .withPartitionPath("data_bucket=0") // same partition as FILE_A .withRecordCount(0) .build(); static final DataFile FILE_F = DataFiles.builder(SPEC) .withPath("/path/to/data-f.parquet") .withFileSizeInBytes(0) .withPartitionPath("data_bucket=1") // same partition as FILE_B .withRecordCount(0) .build(); static final DataFile FILE_G = DataFiles.builder(SPEC) .withPath("/path/to/data-g.parquet") 
.withFileSizeInBytes(0) .withPartitionPath("data_bucket=10") // no other partition .withRecordCount(0) .build(); static final DataFile FILE_UNPARTITIONED_A = DataFiles.builder(PartitionSpec.unpartitioned()) .withPath("/path/to/data-unpartitioned-a.parquet") .withFileSizeInBytes(10) .withRecordCount(1) .build(); static final DataFile FILE_NULL_PARTITION = DataFiles.builder(SPEC) .withPath("/path/to/data-null-partition.parquet") .withFileSizeInBytes(0) .withPartitionPath("data_bucket=__HIVE_DEFAULT_PARTITION__") .withRecordCount(0) .build(); // Partition spec with VOID partition transform ("alwaysNull" in Java code.) static final PartitionSpec SPEC_VOID = PartitionSpec.builderFor(SCHEMA).alwaysNull("id").bucket("data", BUCKETS_NUMBER).build(); static final DataFile FILE_A_VOID_PARTITION = DataFiles.builder(SPEC_VOID) .withPath("/path/to/data-a-void-partition.parquet") .withFileSizeInBytes(10) .withPartitionPath("id_null=__HIVE_DEFAULT_PARTITION__/data_bucket=0") .withRecordCount(1) .build(); static final DataFile FILE_B_VOID_PARTITION = DataFiles.builder(SPEC_VOID) .withPath("/path/to/data-b-void-partition.parquet") .withFileSizeInBytes(10) .withPartitionPath("id_null=__HIVE_DEFAULT_PARTITION__/data_bucket=1") .withRecordCount(10) .build(); static final DeleteFile FILE_UNPARTITIONED_A_DELETES = FileMetadata.deleteFileBuilder(PartitionSpec.unpartitioned()) .ofPositionDeletes() .withPath("/path/to/data-unpartitioned-a-deletes.parquet") .withFileSizeInBytes(10) .withRecordCount(1) .build(); static final PartitionSpec SPEC_ALL_VOID = PartitionSpec.builderFor(SCHEMA).alwaysNull("id").alwaysNull("data").build(); static final DataFile FILE_ALL_VOID_UNPARTITIONED_A = DataFiles.builder(SPEC_ALL_VOID) .withPath("/path/to/data-all-void-unpartitioned-a.parquet") .withFileSizeInBytes(10) .withRecordCount(1) .build(); static final DataFile FILE_ALL_VOID_UNPARTITIONED_B = DataFiles.builder(SPEC_ALL_VOID) .withPath("/path/to/data-all-void-unpartitioned-b.parquet") 
.withFileSizeInBytes(10) .withRecordCount(1) .build(); @Parameter(index = 1) private String branch; @Parameters(name = "formatVersion = {0}, branch = {1}") protected static List<Object> parameters() { return TestHelpers.ALL_VERSIONS.stream() .flatMap(v -> Stream.of(new Object[] {v, "main"}, new Object[] {v, "branch"})) .collect(Collectors.toList()); } @TestTemplate public void testReplaceOnePartition() { commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); commit(table, table.newReplacePartitions().addFile(FILE_E), branch); long replaceId = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(replaceId).isNotEqualTo(baseId); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(2); // manifest is not merged because it is less than the minimum validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(replaceId), files(FILE_E), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(replaceId, baseId), files(FILE_A, FILE_B), statuses(Status.DELETED, Status.EXISTING)); } @TestTemplate public void testReplaceAndMergeOnePartition() { // ensure the overwrite results in a merge table.updateProperties().set(TableProperties.MANIFEST_MIN_MERGE_COUNT, "1").commit(); commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); commit(table, table.newReplacePartitions().addFile(FILE_E), branch); long replaceId = latestSnapshot(table, branch).snapshotId(); assertThat(replaceId).isNotEqualTo(baseId); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(1); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(replaceId, replaceId, baseId), files(FILE_E, 
FILE_A, FILE_B), statuses(Status.ADDED, Status.DELETED, Status.EXISTING)); } @TestTemplate public void testReplaceWithUnpartitionedTable() throws IOException { Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(0); commit(table, unpartitioned.newAppend().appendFile(FILE_A), branch); // make sure the data was successfully added assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(1); validateSnapshot( null, latestSnapshot(TestTables.readMetadata("unpartitioned"), branch), FILE_A); ReplacePartitions replacePartitions = unpartitioned.newReplacePartitions().addFile(FILE_B); commit(table, replacePartitions, branch); assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(2); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceId = latestSnapshot(replaceMetadata, branch).snapshotId(); assertThat(latestSnapshot(replaceMetadata, branch).allManifests(unpartitioned.io())).hasSize(2); validateManifestEntries( latestSnapshot(replaceMetadata, branch).allManifests(unpartitioned.io()).get(0), ids(replaceId), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(replaceMetadata, branch).allManifests(unpartitioned.io()).get(1), ids(replaceId), files(FILE_A), statuses(Status.DELETED)); } @TestTemplate public void testReplaceAllVoidUnpartitionedTable() { Table tableVoid = TestTables.create(tableDir, "allvoidUnpartitioned", SCHEMA, SPEC_ALL_VOID, formatVersion); commit(tableVoid, tableVoid.newAppend().appendFile(FILE_ALL_VOID_UNPARTITIONED_A), branch); validateSnapshot( null, latestSnapshot(TestTables.readMetadata("allvoidUnpartitioned"), branch), FILE_ALL_VOID_UNPARTITIONED_A); ReplacePartitions replacePartitions = tableVoid.newReplacePartitions().addFile(FILE_ALL_VOID_UNPARTITIONED_B); commit(tableVoid, replacePartitions, branch); 
assertThat(TestTables.metadataVersion("allvoidUnpartitioned")).isEqualTo(2); TableMetadata replaceMetadata = TestTables.readMetadata("allvoidUnpartitioned"); long replaceId = latestSnapshot(replaceMetadata, branch).snapshotId(); List<ManifestFile> manifestFiles = latestSnapshot(replaceMetadata, branch).allManifests(tableVoid.io()); assertThat(manifestFiles).hasSize(2); validateManifestEntries( manifestFiles.get(0), ids(replaceId), files(FILE_ALL_VOID_UNPARTITIONED_B), statuses(Status.ADDED)); validateManifestEntries( manifestFiles.get(1), ids(replaceId), files(FILE_ALL_VOID_UNPARTITIONED_A), statuses(Status.DELETED)); } @TestTemplate public void testReplaceAndMergeWithUnpartitionedTable() throws IOException { Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); // ensure the overwrite results in a merge unpartitioned.updateProperties().set(TableProperties.MANIFEST_MIN_MERGE_COUNT, "1").commit(); assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(1); AppendFiles appendFiles = unpartitioned.newAppend().appendFile(FILE_A); commit(table, appendFiles, branch); // make sure the data was successfully added assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(2); validateSnapshot( null, latestSnapshot(TestTables.readMetadata("unpartitioned"), branch), FILE_A); ReplacePartitions replacePartitions = unpartitioned.newReplacePartitions().addFile(FILE_B); commit(table, replacePartitions, branch); assertThat(TestTables.metadataVersion("unpartitioned")).isEqualTo(3); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceId = latestSnapshot(replaceMetadata, branch).snapshotId(); assertThat(latestSnapshot(replaceMetadata, branch).allManifests(unpartitioned.io())).hasSize(1); validateManifestEntries( latestSnapshot(replaceMetadata, branch).allManifests(unpartitioned.io()).get(0), ids(replaceId, replaceId), files(FILE_B, FILE_A), statuses(Status.ADDED, 
Status.DELETED)); } @TestTemplate public void testValidationFailure() { commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); ReplacePartitions replace = table.newReplacePartitions().addFile(FILE_F).addFile(FILE_G).validateAppendOnly(); assertThatThrownBy(() -> commit(table, replace, branch)) .isInstanceOf(ValidationException.class) .hasMessageStartingWith("Cannot commit file that conflicts with existing partition"); assertThat(latestSnapshot(readMetadata(), branch).snapshotId()).isEqualTo(baseId); } @TestTemplate public void testValidationSuccess() { commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); commit(table, table.newReplacePartitions().addFile(FILE_G).validateAppendOnly(), branch); long replaceId = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(replaceId).isNotEqualTo(baseId); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(2); // manifest is not merged because it is less than the minimum validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(replaceId), files(FILE_G), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(baseId, baseId), files(FILE_A, FILE_B), statuses(Status.ADDED, Status.ADDED)); } @TestTemplate public void testValidationNotInvoked() { commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); // Two concurrent ReplacePartitions with No Validation Enabled commit( table, table .newReplacePartitions() .addFile(FILE_E) .validateFromSnapshot(latestSnapshot(base, branch).snapshotId()), branch); commit( table, table .newReplacePartitions() .addFile(FILE_A) // Replaces FILE_E which becomes Deleted .addFile(FILE_B) 
.validateFromSnapshot(latestSnapshot(base, branch).snapshotId()), branch); long replaceId = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(2); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(replaceId, replaceId), files(FILE_A, FILE_B), statuses(Status.ADDED, Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(replaceId), files(FILE_E), statuses(Status.DELETED)); } @TestTemplate public void testValidateWithDefaultSnapshotId() { commit(table, table.newReplacePartitions().addFile(FILE_A), branch); // Concurrent Replace Partitions should fail with ValidationException ReplacePartitions replace = table.newReplacePartitions(); assertThatThrownBy( () -> commit( table, replace .addFile(FILE_A) .addFile(FILE_B) .validateNoConflictingData() .validateNoConflictingDeletes(), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching partitions " + "[data_bucket=0, data_bucket=1]: [/path/to/data-a.parquet]"); } @TestTemplate public void testValidateWithNullPartition() { commit(table, table.newReplacePartitions().addFile(FILE_NULL_PARTITION), branch); // Concurrent Replace Partitions should fail with ValidationException ReplacePartitions replace = table.newReplacePartitions(); assertThatThrownBy( () -> commit( table, replace .addFile(FILE_NULL_PARTITION) .addFile(FILE_B) .validateNoConflictingData() .validateNoConflictingDeletes(), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching partitions " + "[data_bucket=null, data_bucket=1]: [/path/to/data-null-partition.parquet]"); } @TestTemplate public void testValidateWithVoidTransform() throws IOException { Table tableVoid = TestTables.create(tableDir, "tablevoid", SCHEMA, SPEC_VOID, formatVersion); commit(tableVoid, 
tableVoid.newReplacePartitions().addFile(FILE_A_VOID_PARTITION), branch); // Concurrent Replace Partitions should fail with ValidationException ReplacePartitions replace = tableVoid.newReplacePartitions(); assertThatThrownBy( () -> commit( tableVoid, replace .addFile(FILE_A_VOID_PARTITION) .addFile(FILE_B_VOID_PARTITION) .validateNoConflictingData() .validateNoConflictingDeletes(), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching partitions " + "[id_null=null, data_bucket=1, id_null=null, data_bucket=0]: " + "[/path/to/data-a-void-partition.parquet]"); } @TestTemplate public void testConcurrentReplaceConflict() { commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); // Concurrent Replace Partitions should fail with ValidationException commit(table, table.newReplacePartitions().addFile(FILE_A), branch); assertThatThrownBy( () -> commit( table, table .newReplacePartitions() .validateFromSnapshot(baseId) .addFile(FILE_A) .addFile(FILE_B) .validateNoConflictingData() .validateNoConflictingDeletes(), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching partitions " + "[data_bucket=0, data_bucket=1]: [/path/to/data-a.parquet]"); } @TestTemplate public void testConcurrentReplaceNoConflict() { commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); long id1 = latestSnapshot(base, branch).snapshotId(); // Concurrent Replace Partitions should not fail if concerning different partitions commit(table, table.newReplacePartitions().addFile(FILE_A), branch); long id2 = latestSnapshot(readMetadata(), branch).snapshotId(); commit( table, table .newReplacePartitions() .validateFromSnapshot(id1) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_B), 
branch); long id3 = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(2); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(id3), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(id2), files(FILE_A), statuses(Status.ADDED)); } @TestTemplate public void testConcurrentReplaceConflictNonPartitioned() { Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); commit(table, unpartitioned.newAppend().appendFile(FILE_UNPARTITIONED_A), branch); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceBaseId = latestSnapshot(replaceMetadata, branch).snapshotId(); // Concurrent ReplacePartitions should fail with ValidationException commit(table, unpartitioned.newReplacePartitions().addFile(FILE_UNPARTITIONED_A), branch); assertThatThrownBy( () -> commit( table, unpartitioned .newReplacePartitions() .validateFromSnapshot(replaceBaseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_UNPARTITIONED_A), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching true: " + "[/path/to/data-unpartitioned-a.parquet]"); } @TestTemplate public void testAppendReplaceConflict() { commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); // Concurrent Append and ReplacePartition should fail with ValidationException commit(table, table.newFastAppend().appendFile(FILE_B), branch); assertThatThrownBy( () -> commit( table, table .newReplacePartitions() .validateFromSnapshot(baseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_A) .addFile(FILE_B), branch)) 
.isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching partitions " + "[data_bucket=0, data_bucket=1]: [/path/to/data-b.parquet]"); } @TestTemplate public void testAppendReplaceNoConflict() { commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); long id1 = latestSnapshot(base, branch).snapshotId(); // Concurrent Append and ReplacePartition should not conflict if concerning different partitions commit(table, table.newFastAppend().appendFile(FILE_B), branch); long id2 = latestSnapshot(readMetadata(), branch).snapshotId(); commit( table, table .newReplacePartitions() .validateFromSnapshot(id1) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_A), branch); long id3 = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(3); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(id3), files(FILE_A), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(id2), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(2), ids(id3), files(FILE_A), statuses(Status.DELETED)); } @TestTemplate public void testAppendReplaceConflictNonPartitioned() { Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); commit(table, unpartitioned.newAppend().appendFile(FILE_UNPARTITIONED_A), branch); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceBaseId = latestSnapshot(replaceMetadata, branch).snapshotId(); // Concurrent Append and ReplacePartitions should fail with ValidationException commit(table, unpartitioned.newAppend().appendFile(FILE_UNPARTITIONED_A), branch); assertThatThrownBy( () -> commit( table, unpartitioned 
.newReplacePartitions() .validateFromSnapshot(replaceBaseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_UNPARTITIONED_A), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting files that can contain records matching true: " + "[/path/to/data-unpartitioned-a.parquet]"); } @TestTemplate public void testDeleteReplaceConflict() { assumeThat(formatVersion).isEqualTo(2); commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); // Concurrent Delete and ReplacePartition should fail with ValidationException commit( table, table.newRowDelta().addDeletes(FILE_A_DELETES).validateFromSnapshot(baseId), branch); assertThatThrownBy( () -> commit( table, table .newReplacePartitions() .validateFromSnapshot(baseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_A), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found new conflicting delete files that can apply to records matching " + "[data_bucket=0]: [/path/to/data-a-deletes.parquet]"); } @TestTemplate public void testDeleteReplaceConflictNonPartitioned() { assumeThat(formatVersion).isEqualTo(2); Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); commit(table, unpartitioned.newAppend().appendFile(FILE_A), branch); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceBaseId = latestSnapshot(replaceMetadata, branch).snapshotId(); // Concurrent Delete and ReplacePartitions should fail with ValidationException commit(table, unpartitioned.newRowDelta().addDeletes(FILE_UNPARTITIONED_A_DELETES), branch); assertThatThrownBy( () -> commit( table, unpartitioned .newReplacePartitions() .validateFromSnapshot(replaceBaseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_UNPARTITIONED_A), branch)) 
.isInstanceOf(ValidationException.class) .hasMessage( "Found new conflicting delete files that can apply to records matching true: " + "[/path/to/data-unpartitioned-a-deletes.parquet]"); } @TestTemplate public void testDeleteReplaceNoConflict() { assumeThat(formatVersion).isEqualTo(2); commit(table, table.newFastAppend().appendFile(FILE_A), branch); long id1 = latestSnapshot(readMetadata(), branch).snapshotId(); // Concurrent Delta and ReplacePartition should not conflict if concerning different partitions commit( table, table .newRowDelta() .addDeletes(FILE_A_DELETES) .validateFromSnapshot(id1) .validateNoConflictingDataFiles() .validateNoConflictingDeleteFiles() .validateFromSnapshot(id1), branch); long id2 = latestSnapshot(readMetadata(), branch).snapshotId(); commit( table, table .newReplacePartitions() .validateNoConflictingData() .validateNoConflictingDeletes() .validateFromSnapshot(id1) .addFile(FILE_B), branch); long id3 = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(3); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(id3), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(id1), files(FILE_A), statuses(Status.ADDED)); validateDeleteManifest( latestSnapshot(table, branch).allManifests(table.io()).get(2), dataSeqs(2L), fileSeqs(2L), ids(id2), files(FILE_A_DELETES), statuses(Status.ADDED)); } @TestTemplate public void testOverwriteReplaceConflict() { assumeThat(formatVersion).isEqualTo(2); commit(table, table.newFastAppend().appendFile(FILE_A), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); // Concurrent Overwrite and ReplacePartition should fail with ValidationException commit(table, table.newOverwrite().deleteFile(FILE_A), branch); assertThatThrownBy( () -> commit( table, table 
.newReplacePartitions() .validateFromSnapshot(baseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_A), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting deleted files that can apply to records matching " + "[data_bucket=0]: [/path/to/data-a.parquet]"); } @TestTemplate public void testOverwriteReplaceNoConflict() { assumeThat(formatVersion).isEqualTo(2); commit(table, table.newFastAppend().appendFile(FILE_A).appendFile(FILE_B), branch); TableMetadata base = readMetadata(); long baseId = latestSnapshot(base, branch).snapshotId(); // Concurrent Overwrite and ReplacePartition should not fail with if concerning different // partitions commit(table, table.newOverwrite().deleteFile(FILE_A), branch); commit( table, table .newReplacePartitions() .validateNoConflictingData() .validateNoConflictingDeletes() .validateFromSnapshot(baseId) .addFile(FILE_B), branch); long finalId = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(2); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(finalId), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(finalId), files(FILE_B), statuses(Status.DELETED)); } @TestTemplate public void testOverwriteReplaceConflictNonPartitioned() { assumeThat(formatVersion).isEqualTo(2); Table unpartitioned = TestTables.create( tableDir, "unpartitioned", SCHEMA, PartitionSpec.unpartitioned(), formatVersion); commit(table, unpartitioned.newAppend().appendFile(FILE_UNPARTITIONED_A), branch); TableMetadata replaceMetadata = TestTables.readMetadata("unpartitioned"); long replaceBaseId = latestSnapshot(replaceMetadata, branch).snapshotId(); // Concurrent Overwrite and ReplacePartitions should fail with ValidationException commit(table, unpartitioned.newOverwrite().deleteFile(FILE_UNPARTITIONED_A), branch); 
assertThatThrownBy( () -> commit( table, unpartitioned .newReplacePartitions() .validateFromSnapshot(replaceBaseId) .validateNoConflictingData() .validateNoConflictingDeletes() .addFile(FILE_UNPARTITIONED_A), branch)) .isInstanceOf(ValidationException.class) .hasMessage( "Found conflicting deleted files that can contain records matching true: " + "[/path/to/data-unpartitioned-a.parquet]"); } @TestTemplate public void testValidateOnlyDeletes() { commit(table, table.newAppend().appendFile(FILE_A), branch); long baseId = latestSnapshot(readMetadata(), branch).snapshotId(); // Snapshot Isolation mode: appends do not conflict with replace commit(table, table.newAppend().appendFile(FILE_B), branch); commit( table, table .newReplacePartitions() .validateFromSnapshot(baseId) .validateNoConflictingDeletes() .addFile(FILE_B), branch); long finalId = latestSnapshot(readMetadata(), branch).snapshotId(); assertThat(latestSnapshot(table, branch).allManifests(table.io())).hasSize(3); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(0), ids(finalId), files(FILE_B), statuses(Status.ADDED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(1), ids(finalId), files(FILE_B), statuses(Status.DELETED)); validateManifestEntries( latestSnapshot(table, branch).allManifests(table.io()).get(2), ids(baseId), files(FILE_A), statuses(Status.ADDED)); } @TestTemplate public void testEmptyPartitionPathWithUnpartitionedTable() { DataFiles.builder(PartitionSpec.unpartitioned()).withPartitionPath(""); } @TestTemplate public void replacingAndMergingOnePartitionAlsoRemovesDV() { assumeThat(formatVersion).isGreaterThanOrEqualTo(3); // ensure the overwrite results in a merge table.updateProperties().set(TableProperties.MANIFEST_MIN_MERGE_COUNT, "1").commit(); commit( table, table .newRowDelta() .addRows(FILE_A) .addRows(FILE_B) .addDeletes(fileADeletes()) .addDeletes(fileBDeletes()), branch); Snapshot snapshot = latestSnapshot(table, 
branch); // FILE_E has the same partition as FILE_A. The dv for FILE_A will be removed from the delete // manifest commit(table, table.newReplacePartitions().addFile(FILE_E), branch); Snapshot replaceSnapshot = latestSnapshot(table, branch); assertThat(replaceSnapshot.dataManifests(table.io())).hasSize(1); assertThat(replaceSnapshot.deleteManifests(table.io())).hasSize(1); validateManifestEntries( replaceSnapshot.dataManifests(table.io()).get(0), ids(replaceSnapshot.snapshotId(), replaceSnapshot.snapshotId(), snapshot.snapshotId()), files(FILE_E, FILE_A, FILE_B), statuses(Status.ADDED, Status.DELETED, Status.EXISTING)); validateDeleteManifest( replaceSnapshot.deleteManifests(table.io()).get(0), dataSeqs(1L, 1L), fileSeqs(1L, 1L), ids(replaceSnapshot.snapshotId(), snapshot.snapshotId()), files(fileADeletes(), fileBDeletes()), statuses(Status.DELETED, Status.EXISTING)); } }
openjdk/jdk8
35,190
jdk/src/share/classes/sun/security/pkcs/PKCS7.java
/* * Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.security.pkcs; import java.io.*; import java.math.BigInteger; import java.net.URI; import java.util.*; import java.security.cert.X509Certificate; import java.security.cert.CertificateException; import java.security.cert.X509CRL; import java.security.cert.CRLException; import java.security.cert.CertificateFactory; import java.security.*; import sun.security.timestamp.*; import sun.security.util.*; import sun.security.x509.AlgorithmId; import sun.security.x509.X509CertImpl; import sun.security.x509.X509CertInfo; import sun.security.x509.X509CRLImpl; import sun.security.x509.X500Name; /** * PKCS7 as defined in RSA Laboratories PKCS7 Technical Note. Profile * Supports only <tt>SignedData</tt> ContentInfo * type, where to the type of data signed is plain Data. 
* For signedData, <tt>crls</tt>, <tt>attributes</tt> and * PKCS#6 Extended Certificates are not supported. * * @author Benjamin Renaud */ public class PKCS7 { private ObjectIdentifier contentType; // the ASN.1 members for a signedData (and other) contentTypes private BigInteger version = null; private AlgorithmId[] digestAlgorithmIds = null; private ContentInfo contentInfo = null; private X509Certificate[] certificates = null; private X509CRL[] crls = null; private SignerInfo[] signerInfos = null; private boolean oldStyle = false; // Is this JDK1.1.x-style? private Principal[] certIssuerNames; /* * Random number generator for creating nonce values * (Lazy initialization) */ private static class SecureRandomHolder { static final SecureRandom RANDOM; static { SecureRandom tmp = null; try { tmp = SecureRandom.getInstance("SHA1PRNG"); } catch (NoSuchAlgorithmException e) { // should not happen } RANDOM = tmp; } } /* * Object identifier for the timestamping key purpose. */ private static final String KP_TIMESTAMPING_OID = "1.3.6.1.5.5.7.3.8"; /* * Object identifier for extendedKeyUsage extension */ private static final String EXTENDED_KEY_USAGE_OID = "2.5.29.37"; /** * Unmarshals a PKCS7 block from its encoded form, parsing the * encoded bytes from the InputStream. * * @param in an input stream holding at least one PKCS7 block. * @exception ParsingException on parsing errors. * @exception IOException on other errors. */ public PKCS7(InputStream in) throws ParsingException, IOException { DataInputStream dis = new DataInputStream(in); byte[] data = new byte[dis.available()]; dis.readFully(data); parse(new DerInputStream(data)); } /** * Unmarshals a PKCS7 block from its encoded form, parsing the * encoded bytes from the DerInputStream. * * @param derin a DerInputStream holding at least one PKCS7 block. * @exception ParsingException on parsing errors. 
*/ public PKCS7(DerInputStream derin) throws ParsingException { parse(derin); } /** * Unmarshals a PKCS7 block from its encoded form, parsing the * encoded bytes. * * @param bytes the encoded bytes. * @exception ParsingException on parsing errors. */ public PKCS7(byte[] bytes) throws ParsingException { try { DerInputStream derin = new DerInputStream(bytes); parse(derin); } catch (IOException ioe1) { ParsingException pe = new ParsingException( "Unable to parse the encoded bytes"); pe.initCause(ioe1); throw pe; } } /* * Parses a PKCS#7 block. */ private void parse(DerInputStream derin) throws ParsingException { try { derin.mark(derin.available()); // try new (i.e., JDK1.2) style parse(derin, false); } catch (IOException ioe) { try { derin.reset(); // try old (i.e., JDK1.1.x) style parse(derin, true); oldStyle = true; } catch (IOException ioe1) { ParsingException pe = new ParsingException( ioe1.getMessage()); pe.initCause(ioe); pe.addSuppressed(ioe1); throw pe; } } } /** * Parses a PKCS#7 block. * * @param derin the ASN.1 encoding of the PKCS#7 block. * @param oldStyle flag indicating whether or not the given PKCS#7 block * is encoded according to JDK1.1.x. */ private void parse(DerInputStream derin, boolean oldStyle) throws IOException { contentInfo = new ContentInfo(derin, oldStyle); contentType = contentInfo.contentType; DerValue content = contentInfo.getContent(); if (contentType.equals((Object)ContentInfo.SIGNED_DATA_OID)) { parseSignedData(content); } else if (contentType.equals((Object)ContentInfo.OLD_SIGNED_DATA_OID)) { // This is for backwards compatibility with JDK 1.1.x parseOldSignedData(content); } else if (contentType.equals((Object) ContentInfo.NETSCAPE_CERT_SEQUENCE_OID)){ parseNetscapeCertChain(content); } else { throw new ParsingException("content type " + contentType + " not supported."); } } /** * Construct an initialized PKCS7 block. * * @param digestAlgorithmIds the message digest algorithm identifiers. 
* @param contentInfo the content information. * @param certificates an array of X.509 certificates. * @param crls an array of CRLs * @param signerInfos an array of signer information. */ public PKCS7(AlgorithmId[] digestAlgorithmIds, ContentInfo contentInfo, X509Certificate[] certificates, X509CRL[] crls, SignerInfo[] signerInfos) { version = BigInteger.ONE; this.digestAlgorithmIds = digestAlgorithmIds; this.contentInfo = contentInfo; this.certificates = certificates; this.crls = crls; this.signerInfos = signerInfos; } public PKCS7(AlgorithmId[] digestAlgorithmIds, ContentInfo contentInfo, X509Certificate[] certificates, SignerInfo[] signerInfos) { this(digestAlgorithmIds, contentInfo, certificates, null, signerInfos); } private void parseNetscapeCertChain(DerValue val) throws ParsingException, IOException { DerInputStream dis = new DerInputStream(val.toByteArray()); DerValue[] contents = dis.getSequence(2); certificates = new X509Certificate[contents.length]; CertificateFactory certfac = null; try { certfac = CertificateFactory.getInstance("X.509"); } catch (CertificateException ce) { // do nothing } for (int i=0; i < contents.length; i++) { ByteArrayInputStream bais = null; try { if (certfac == null) certificates[i] = new X509CertImpl(contents[i]); else { byte[] encoded = contents[i].toByteArray(); bais = new ByteArrayInputStream(encoded); certificates[i] = (X509Certificate)certfac.generateCertificate(bais); bais.close(); bais = null; } } catch (CertificateException ce) { ParsingException pe = new ParsingException(ce.getMessage()); pe.initCause(ce); throw pe; } catch (IOException ioe) { ParsingException pe = new ParsingException(ioe.getMessage()); pe.initCause(ioe); throw pe; } finally { if (bais != null) bais.close(); } } } private void parseSignedData(DerValue val) throws ParsingException, IOException { DerInputStream dis = val.toDerInputStream(); // Version version = dis.getBigInteger(); // digestAlgorithmIds DerValue[] digestAlgorithmIdVals = dis.getSet(1); 
int len = digestAlgorithmIdVals.length; digestAlgorithmIds = new AlgorithmId[len]; try { for (int i = 0; i < len; i++) { DerValue oid = digestAlgorithmIdVals[i]; digestAlgorithmIds[i] = AlgorithmId.parse(oid); } } catch (IOException e) { ParsingException pe = new ParsingException("Error parsing digest AlgorithmId IDs: " + e.getMessage()); pe.initCause(e); throw pe; } // contentInfo contentInfo = new ContentInfo(dis); CertificateFactory certfac = null; try { certfac = CertificateFactory.getInstance("X.509"); } catch (CertificateException ce) { // do nothing } /* * check if certificates (implicit tag) are provided * (certificates are OPTIONAL) */ if ((byte)(dis.peekByte()) == (byte)0xA0) { DerValue[] certVals = dis.getSet(2, true); len = certVals.length; certificates = new X509Certificate[len]; int count = 0; for (int i = 0; i < len; i++) { ByteArrayInputStream bais = null; try { byte tag = certVals[i].getTag(); // We only parse the normal certificate. Other types of // CertificateChoices ignored. 
if (tag == DerValue.tag_Sequence) { if (certfac == null) { certificates[count] = new X509CertImpl(certVals[i]); } else { byte[] encoded = certVals[i].toByteArray(); bais = new ByteArrayInputStream(encoded); certificates[count] = (X509Certificate)certfac.generateCertificate(bais); bais.close(); bais = null; } count++; } } catch (CertificateException ce) { ParsingException pe = new ParsingException(ce.getMessage()); pe.initCause(ce); throw pe; } catch (IOException ioe) { ParsingException pe = new ParsingException(ioe.getMessage()); pe.initCause(ioe); throw pe; } finally { if (bais != null) bais.close(); } } if (count != len) { certificates = Arrays.copyOf(certificates, count); } } // check if crls (implicit tag) are provided (crls are OPTIONAL) if ((byte)(dis.peekByte()) == (byte)0xA1) { DerValue[] crlVals = dis.getSet(1, true); len = crlVals.length; crls = new X509CRL[len]; for (int i = 0; i < len; i++) { ByteArrayInputStream bais = null; try { if (certfac == null) crls[i] = new X509CRLImpl(crlVals[i]); else { byte[] encoded = crlVals[i].toByteArray(); bais = new ByteArrayInputStream(encoded); crls[i] = (X509CRL) certfac.generateCRL(bais); bais.close(); bais = null; } } catch (CRLException e) { ParsingException pe = new ParsingException(e.getMessage()); pe.initCause(e); throw pe; } finally { if (bais != null) bais.close(); } } } // signerInfos DerValue[] signerInfoVals = dis.getSet(1); len = signerInfoVals.length; signerInfos = new SignerInfo[len]; for (int i = 0; i < len; i++) { DerInputStream in = signerInfoVals[i].toDerInputStream(); signerInfos[i] = new SignerInfo(in); } } /* * Parses an old-style SignedData encoding (for backwards * compatibility with JDK1.1.x). 
*/ private void parseOldSignedData(DerValue val) throws ParsingException, IOException { DerInputStream dis = val.toDerInputStream(); // Version version = dis.getBigInteger(); // digestAlgorithmIds DerValue[] digestAlgorithmIdVals = dis.getSet(1); int len = digestAlgorithmIdVals.length; digestAlgorithmIds = new AlgorithmId[len]; try { for (int i = 0; i < len; i++) { DerValue oid = digestAlgorithmIdVals[i]; digestAlgorithmIds[i] = AlgorithmId.parse(oid); } } catch (IOException e) { throw new ParsingException("Error parsing digest AlgorithmId IDs"); } // contentInfo contentInfo = new ContentInfo(dis, true); // certificates CertificateFactory certfac = null; try { certfac = CertificateFactory.getInstance("X.509"); } catch (CertificateException ce) { // do nothing } DerValue[] certVals = dis.getSet(2); len = certVals.length; certificates = new X509Certificate[len]; for (int i = 0; i < len; i++) { ByteArrayInputStream bais = null; try { if (certfac == null) certificates[i] = new X509CertImpl(certVals[i]); else { byte[] encoded = certVals[i].toByteArray(); bais = new ByteArrayInputStream(encoded); certificates[i] = (X509Certificate)certfac.generateCertificate(bais); bais.close(); bais = null; } } catch (CertificateException ce) { ParsingException pe = new ParsingException(ce.getMessage()); pe.initCause(ce); throw pe; } catch (IOException ioe) { ParsingException pe = new ParsingException(ioe.getMessage()); pe.initCause(ioe); throw pe; } finally { if (bais != null) bais.close(); } } // crls are ignored. dis.getSet(0); // signerInfos DerValue[] signerInfoVals = dis.getSet(1); len = signerInfoVals.length; signerInfos = new SignerInfo[len]; for (int i = 0; i < len; i++) { DerInputStream in = signerInfoVals[i].toDerInputStream(); signerInfos[i] = new SignerInfo(in, true); } } /** * Encodes the signed data to an output stream. * * @param out the output stream to write the encoded data to. * @exception IOException on encoding errors. 
*/ public void encodeSignedData(OutputStream out) throws IOException { DerOutputStream derout = new DerOutputStream(); encodeSignedData(derout); out.write(derout.toByteArray()); } /** * Encodes the signed data to a DerOutputStream. * * @param out the DerOutputStream to write the encoded data to. * @exception IOException on encoding errors. */ public void encodeSignedData(DerOutputStream out) throws IOException { DerOutputStream signedData = new DerOutputStream(); // version signedData.putInteger(version); // digestAlgorithmIds signedData.putOrderedSetOf(DerValue.tag_Set, digestAlgorithmIds); // contentInfo contentInfo.encode(signedData); // certificates (optional) if (certificates != null && certificates.length != 0) { // cast to X509CertImpl[] since X509CertImpl implements DerEncoder X509CertImpl implCerts[] = new X509CertImpl[certificates.length]; for (int i = 0; i < certificates.length; i++) { if (certificates[i] instanceof X509CertImpl) implCerts[i] = (X509CertImpl) certificates[i]; else { try { byte[] encoded = certificates[i].getEncoded(); implCerts[i] = new X509CertImpl(encoded); } catch (CertificateException ce) { throw new IOException(ce); } } } // Add the certificate set (tagged with [0] IMPLICIT) // to the signed data signedData.putOrderedSetOf((byte)0xA0, implCerts); } // CRLs (optional) if (crls != null && crls.length != 0) { // cast to X509CRLImpl[] since X509CRLImpl implements DerEncoder Set<X509CRLImpl> implCRLs = new HashSet<X509CRLImpl>(crls.length); for (X509CRL crl: crls) { if (crl instanceof X509CRLImpl) implCRLs.add((X509CRLImpl) crl); else { try { byte[] encoded = crl.getEncoded(); implCRLs.add(new X509CRLImpl(encoded)); } catch (CRLException ce) { throw new IOException(ce); } } } // Add the CRL set (tagged with [1] IMPLICIT) // to the signed data signedData.putOrderedSetOf((byte)0xA1, implCRLs.toArray(new X509CRLImpl[implCRLs.size()])); } // signerInfos signedData.putOrderedSetOf(DerValue.tag_Set, signerInfos); // making it a signed data 
block DerValue signedDataSeq = new DerValue(DerValue.tag_Sequence, signedData.toByteArray()); // making it a content info sequence ContentInfo block = new ContentInfo(ContentInfo.SIGNED_DATA_OID, signedDataSeq); // writing out the contentInfo sequence block.encode(out); } /** * This verifies a given SignerInfo. * * @param info the signer information. * @param bytes the DER encoded content information. * * @exception NoSuchAlgorithmException on unrecognized algorithms. * @exception SignatureException on signature handling errors. */ public SignerInfo verify(SignerInfo info, byte[] bytes) throws NoSuchAlgorithmException, SignatureException { return info.verify(this, bytes); } /** * Returns all signerInfos which self-verify. * * @param bytes the DER encoded content information. * * @exception NoSuchAlgorithmException on unrecognized algorithms. * @exception SignatureException on signature handling errors. */ public SignerInfo[] verify(byte[] bytes) throws NoSuchAlgorithmException, SignatureException { Vector<SignerInfo> intResult = new Vector<SignerInfo>(); for (int i = 0; i < signerInfos.length; i++) { SignerInfo signerInfo = verify(signerInfos[i], bytes); if (signerInfo != null) { intResult.addElement(signerInfo); } } if (!intResult.isEmpty()) { SignerInfo[] result = new SignerInfo[intResult.size()]; intResult.copyInto(result); return result; } return null; } /** * Returns all signerInfos which self-verify. * * @exception NoSuchAlgorithmException on unrecognized algorithms. * @exception SignatureException on signature handling errors. */ public SignerInfo[] verify() throws NoSuchAlgorithmException, SignatureException { return verify(null); } /** * Returns the version number of this PKCS7 block. * @return the version or null if version is not specified * for the content type. */ public BigInteger getVersion() { return version; } /** * Returns the message digest algorithms specified in this PKCS7 block. 
* @return the array of Digest Algorithms or null if none are specified * for the content type. */ public AlgorithmId[] getDigestAlgorithmIds() { return digestAlgorithmIds; } /** * Returns the content information specified in this PKCS7 block. */ public ContentInfo getContentInfo() { return contentInfo; } /** * Returns the X.509 certificates listed in this PKCS7 block. * @return a clone of the array of X.509 certificates or null if * none are specified for the content type. */ public X509Certificate[] getCertificates() { if (certificates != null) return certificates.clone(); else return null; } /** * Returns the X.509 crls listed in this PKCS7 block. * @return a clone of the array of X.509 crls or null if none * are specified for the content type. */ public X509CRL[] getCRLs() { if (crls != null) return crls.clone(); else return null; } /** * Returns the signer's information specified in this PKCS7 block. * @return the array of Signer Infos or null if none are specified * for the content type. */ public SignerInfo[] getSignerInfos() { return signerInfos; } /** * Returns the X.509 certificate listed in this PKCS7 block * which has a matching serial number and Issuer name, or * null if one is not found. * * @param serial the serial number of the certificate to retrieve. * @param issuerName the Distinguished Name of the Issuer. */ public X509Certificate getCertificate(BigInteger serial, X500Name issuerName) { if (certificates != null) { if (certIssuerNames == null) populateCertIssuerNames(); for (int i = 0; i < certificates.length; i++) { X509Certificate cert = certificates[i]; BigInteger thisSerial = cert.getSerialNumber(); if (serial.equals(thisSerial) && issuerName.equals(certIssuerNames[i])) { return cert; } } } return null; } /** * Populate array of Issuer DNs from certificates and convert * each Principal to type X500Name if necessary. 
*/ private void populateCertIssuerNames() { if (certificates == null) return; certIssuerNames = new Principal[certificates.length]; for (int i = 0; i < certificates.length; i++) { X509Certificate cert = certificates[i]; Principal certIssuerName = cert.getIssuerDN(); if (!(certIssuerName instanceof X500Name)) { // must extract the original encoded form of DN for // subsequent name comparison checks (converting to a // String and back to an encoded DN could cause the // types of String attribute values to be changed) try { X509CertInfo tbsCert = new X509CertInfo(cert.getTBSCertificate()); certIssuerName = (Principal) tbsCert.get(X509CertInfo.ISSUER + "." + X509CertInfo.DN_NAME); } catch (Exception e) { // error generating X500Name object from the cert's // issuer DN, leave name as is. } } certIssuerNames[i] = certIssuerName; } } /** * Returns the PKCS7 block in a printable string form. */ public String toString() { String out = ""; out += contentInfo + "\n"; if (version != null) out += "PKCS7 :: version: " + Debug.toHexString(version) + "\n"; if (digestAlgorithmIds != null) { out += "PKCS7 :: digest AlgorithmIds: \n"; for (int i = 0; i < digestAlgorithmIds.length; i++) out += "\t" + digestAlgorithmIds[i] + "\n"; } if (certificates != null) { out += "PKCS7 :: certificates: \n"; for (int i = 0; i < certificates.length; i++) out += "\t" + i + ". " + certificates[i] + "\n"; } if (crls != null) { out += "PKCS7 :: crls: \n"; for (int i = 0; i < crls.length; i++) out += "\t" + i + ". " + crls[i] + "\n"; } if (signerInfos != null) { out += "PKCS7 :: signer infos: \n"; for (int i = 0; i < signerInfos.length; i++) out += ("\t" + i + ". " + signerInfos[i] + "\n"); } return out; } /** * Returns true if this is a JDK1.1.x-style PKCS#7 block, and false * otherwise. */ public boolean isOldStyle() { return this.oldStyle; } /** * Assembles a PKCS #7 signed data message that optionally includes a * signature timestamp. 
* * @param signature the signature bytes * @param signerChain the signer's X.509 certificate chain * @param content the content that is signed; specify null to not include * it in the PKCS7 data * @param signatureAlgorithm the name of the signature algorithm * @param tsaURI the URI of the Timestamping Authority; or null if no * timestamp is requested * @param tSAPolicyID the TSAPolicyID of the Timestamping Authority as a * numerical object identifier; or null if we leave the TSA server * to choose one. This argument is only used when tsaURI is provided * @return the bytes of the encoded PKCS #7 signed data message * @throws NoSuchAlgorithmException The exception is thrown if the signature * algorithm is unrecognised. * @throws CertificateException The exception is thrown if an error occurs * while processing the signer's certificate or the TSA's * certificate. * @throws IOException The exception is thrown if an error occurs while * generating the signature timestamp or while generating the signed * data message. 
*/ public static byte[] generateSignedData(byte[] signature, X509Certificate[] signerChain, byte[] content, String signatureAlgorithm, URI tsaURI, String tSAPolicyID) throws CertificateException, IOException, NoSuchAlgorithmException { // Generate the timestamp token PKCS9Attributes unauthAttrs = null; if (tsaURI != null) { // Timestamp the signature HttpTimestamper tsa = new HttpTimestamper(tsaURI); byte[] tsToken = generateTimestampToken(tsa, tSAPolicyID, signature); // Insert the timestamp token into the PKCS #7 signer info element // (as an unsigned attribute) unauthAttrs = new PKCS9Attributes(new PKCS9Attribute[]{ new PKCS9Attribute( PKCS9Attribute.SIGNATURE_TIMESTAMP_TOKEN_STR, tsToken)}); } // Create the SignerInfo X500Name issuerName = X500Name.asX500Name(signerChain[0].getIssuerX500Principal()); BigInteger serialNumber = signerChain[0].getSerialNumber(); String encAlg = AlgorithmId.getEncAlgFromSigAlg(signatureAlgorithm); String digAlg = AlgorithmId.getDigAlgFromSigAlg(signatureAlgorithm); SignerInfo signerInfo = new SignerInfo(issuerName, serialNumber, AlgorithmId.get(digAlg), null, AlgorithmId.get(encAlg), signature, unauthAttrs); // Create the PKCS #7 signed data message SignerInfo[] signerInfos = {signerInfo}; AlgorithmId[] algorithms = {signerInfo.getDigestAlgorithmId()}; // Include or exclude content ContentInfo contentInfo = (content == null) ? new ContentInfo(ContentInfo.DATA_OID, null) : new ContentInfo(content); PKCS7 pkcs7 = new PKCS7(algorithms, contentInfo, signerChain, signerInfos); ByteArrayOutputStream p7out = new ByteArrayOutputStream(); pkcs7.encodeSignedData(p7out); return p7out.toByteArray(); } /** * Requests, processes and validates a timestamp token from a TSA using * common defaults. Uses the following defaults in the timestamp request: * SHA-1 for the hash algorithm, a 64-bit nonce, and request certificate * set to true. 
* * @param tsa the timestamping authority to use * @param tSAPolicyID the TSAPolicyID of the Timestamping Authority as a * numerical object identifier; or null if we leave the TSA server * to choose one * @param toBeTimestamped the token that is to be timestamped * @return the encoded timestamp token * @throws IOException The exception is thrown if an error occurs while * communicating with the TSA, or a non-null * TSAPolicyID is specified in the request but it * does not match the one in the reply * @throws CertificateException The exception is thrown if the TSA's * certificate is not permitted for timestamping. */ private static byte[] generateTimestampToken(Timestamper tsa, String tSAPolicyID, byte[] toBeTimestamped) throws IOException, CertificateException { // Generate a timestamp MessageDigest messageDigest = null; TSRequest tsQuery = null; try { // SHA-1 is always used. messageDigest = MessageDigest.getInstance("SHA-1"); tsQuery = new TSRequest(tSAPolicyID, toBeTimestamped, messageDigest); } catch (NoSuchAlgorithmException e) { // ignore } // Generate a nonce BigInteger nonce = null; if (SecureRandomHolder.RANDOM != null) { nonce = new BigInteger(64, SecureRandomHolder.RANDOM); tsQuery.setNonce(nonce); } tsQuery.requestCertificate(true); TSResponse tsReply = tsa.generateTimestamp(tsQuery); int status = tsReply.getStatusCode(); // Handle TSP error if (status != 0 && status != 1) { throw new IOException("Error generating timestamp: " + tsReply.getStatusCodeAsText() + " " + tsReply.getFailureCodeAsText()); } if (tSAPolicyID != null && !tSAPolicyID.equals(tsReply.getTimestampToken().getPolicyID())) { throw new IOException("TSAPolicyID changed in " + "timestamp token"); } PKCS7 tsToken = tsReply.getToken(); TimestampToken tst = tsReply.getTimestampToken(); if (!tst.getHashAlgorithm().getName().equals("SHA-1")) { throw new IOException("Digest algorithm not SHA-1 in " + "timestamp token"); } if (!MessageDigest.isEqual(tst.getHashedMessage(), 
tsQuery.getHashedMessage())) { throw new IOException("Digest octets changed in timestamp token"); } BigInteger replyNonce = tst.getNonce(); if (replyNonce == null && nonce != null) { throw new IOException("Nonce missing in timestamp token"); } if (replyNonce != null && !replyNonce.equals(nonce)) { throw new IOException("Nonce changed in timestamp token"); } // Examine the TSA's certificate (if present) for (SignerInfo si: tsToken.getSignerInfos()) { X509Certificate cert = si.getCertificate(tsToken); if (cert == null) { // Error, we've already set tsRequestCertificate = true throw new CertificateException( "Certificate not included in timestamp token"); } else { if (!cert.getCriticalExtensionOIDs().contains( EXTENDED_KEY_USAGE_OID)) { throw new CertificateException( "Certificate is not valid for timestamping"); } List<String> keyPurposes = cert.getExtendedKeyUsage(); if (keyPurposes == null || !keyPurposes.contains(KP_TIMESTAMPING_OID)) { throw new CertificateException( "Certificate is not valid for timestamping"); } } } return tsReply.getEncodedToken(); } }
apache/inlong
35,250
inlong-tubemq/tubemq-server/src/main/java/org/apache/inlong/tubemq/server/master/metamanage/metastore/impl/AbsTopicDeployMapperImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.inlong.tubemq.server.master.metamanage.metastore.impl; import org.apache.inlong.tubemq.corebase.TBaseConstants; import org.apache.inlong.tubemq.corebase.rv.ProcessResult; import org.apache.inlong.tubemq.corebase.utils.ConcurrentHashSet; import org.apache.inlong.tubemq.corebase.utils.KeyBuilderUtils; import org.apache.inlong.tubemq.server.common.TServerConstants; import org.apache.inlong.tubemq.server.common.statusdef.TopicStatus; import org.apache.inlong.tubemq.server.master.metamanage.DataOpErrCode; import org.apache.inlong.tubemq.server.master.metamanage.metastore.dao.entity.BaseEntity; import org.apache.inlong.tubemq.server.master.metamanage.metastore.dao.entity.TopicDeployEntity; import org.apache.inlong.tubemq.server.master.metamanage.metastore.dao.mapper.TopicDeployMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; public abstract class AbsTopicDeployMapperImpl implements TopicDeployMapper { protected static final Logger logger = 
LoggerFactory.getLogger(AbsTopicDeployMapperImpl.class); // data cache private final ConcurrentHashMap<String/* recordKey */, TopicDeployEntity> topicDeployCache = new ConcurrentHashMap<>(); private final ConcurrentHashMap<Integer/* brokerId */, ConcurrentHashSet<String>> brokerId2RecordCache = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String/* topicName */, ConcurrentHashSet<String>> topicName2RecordCache = new ConcurrentHashMap<>(); private final ConcurrentHashMap<Integer/* brokerId */, ConcurrentHashSet<String>> brokerId2TopicNameCache = new ConcurrentHashMap<>(); public AbsTopicDeployMapperImpl() { // Initial instant } @Override public boolean addTopicDeployConf(TopicDeployEntity entity, StringBuilder strBuff, ProcessResult result) { // Checks whether the record already exists TopicDeployEntity curEntity = topicDeployCache.get(entity.getRecordKey()); if (curEntity != null) { if (curEntity.isValidTopicStatus()) { result.setFailResult(DataOpErrCode.DERR_EXISTED.getCode(), strBuff.append("Existed record found for brokerId-topicName(") .append(curEntity.getRecordKey()).append(")!").toString()); } else { result.setFailResult(DataOpErrCode.DERR_EXISTED.getCode(), strBuff.append("Softly deleted record found for brokerId-topicName(") .append(curEntity.getRecordKey()) .append("), please resume or remove it first!").toString()); } strBuff.delete(0, strBuff.length()); return result.isSuccess(); } // valid whether system topic if (!isValidSysTopicConf(entity, strBuff, result)) { return result.isSuccess(); } // check deploy status if still accept publish and subscribe if (!entity.isValidTopicStatus() && (entity.isAcceptPublish() || entity.isAcceptSubscribe())) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(), strBuff.append("The values of acceptPublish and acceptSubscribe must be false") .append(" when add brokerId-topicName(") .append(entity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return 
result.isSuccess(); } // Store data to persistent if (putConfig2Persistent(entity, strBuff, result)) { putRecord2Caches(entity); } return result.isSuccess(); } @Override public boolean updTopicDeployConf(TopicDeployEntity entity, StringBuilder strBuff, ProcessResult result) { // Checks whether the record already exists TopicDeployEntity curEntity = topicDeployCache.get(entity.getRecordKey()); if (curEntity == null) { result.setFailResult(DataOpErrCode.DERR_NOT_EXIST.getCode(), strBuff.append("Not found topic deploy configure for brokerId-topicName(") .append(entity.getRecordKey()).append(")!").toString()); strBuff.delete(0, strBuff.length()); return result.isSuccess(); } // Build the entity that need to be updated TopicDeployEntity newEntity = curEntity.clone(); newEntity.updBaseModifyInfo(entity); if (!newEntity.updModifyInfo(entity.getDataVerId(), entity.getTopicId(), entity.getBrokerPort(), entity.getBrokerIp(), entity.getDeployStatus(), entity.getTopicProps())) { result.setFailResult(DataOpErrCode.DERR_UNCHANGED.getCode(), "Topic deploy configure not changed!"); return result.isSuccess(); } // valid whether system topic if (!isValidSysTopicConf(newEntity, strBuff, result)) { return result.isSuccess(); } // check deploy status if (!isValidValuesChange(newEntity, curEntity, strBuff, result)) { return result.isSuccess(); } // Store data to persistent if (putConfig2Persistent(newEntity, strBuff, result)) { putRecord2Caches(newEntity); result.setSuccResult(null); } return result.isSuccess(); } @Override public boolean updTopicDeployStatus(BaseEntity opEntity, int brokerId, String topicName, TopicStatus topicStatus, StringBuilder strBuff, ProcessResult result) { // Checks whether the record already exists TopicDeployEntity curEntity = getTopicConf(brokerId, topicName); if (curEntity == null) { result.setFailResult(DataOpErrCode.DERR_NOT_EXIST.getCode(), strBuff.append("Not found topic deploy configure for brokerId-topicName(") 
.append(brokerId).append("-").append(topicName) .append(")!").toString()); strBuff.delete(0, strBuff.length()); return result.isSuccess(); } // Build the entity that need to be updated TopicDeployEntity newEntity = curEntity.clone(); newEntity.updBaseModifyInfo(opEntity); if (!newEntity.updModifyInfo(opEntity.getDataVerId(), TBaseConstants.META_VALUE_UNDEFINED, TBaseConstants.META_VALUE_UNDEFINED, null, topicStatus, null)) { result.setFailResult(DataOpErrCode.DERR_UNCHANGED.getCode(), "Topic deploy configure not changed!"); return result.isSuccess(); } // check deploy status if (!isValidValuesChange(newEntity, curEntity, strBuff, result)) { return result.isSuccess(); } // Store data to persistent if (putConfig2Persistent(newEntity, strBuff, result)) { putRecord2Caches(newEntity); result.setSuccResult(null); } return result.isSuccess(); } @Override public boolean delTopicDeployConf(String recordKey, StringBuilder strBuff, ProcessResult result) { TopicDeployEntity curEntity = topicDeployCache.get(recordKey); if (curEntity == null) { result.setSuccResult(null); return result.isSuccess(); } // check deploy status if still accept publish and subscribe if (curEntity.isAcceptPublish() || curEntity.isAcceptSubscribe()) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(), strBuff.append("The values of acceptPublish and acceptSubscribe must be false") .append(" before delete brokerId-topicName(") .append(curEntity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return result.isSuccess(); } delConfigFromPersistent(recordKey, strBuff); delRecordFromCaches(recordKey); result.setSuccResult(null); return result.isSuccess(); } @Override public boolean delTopicConfByBrokerId(Integer brokerId, StringBuilder strBuff, ProcessResult result) { ConcurrentHashSet<String> recordKeySet = brokerId2RecordCache.get(brokerId); if (recordKeySet == null || recordKeySet.isEmpty()) { result.setSuccResult(null); return result.isSuccess(); } // 
check deploy status if still accept publish and subscribe TopicDeployEntity curEntity; for (String recordKey : recordKeySet) { curEntity = topicDeployCache.get(recordKey); if (curEntity == null) { continue; } if (curEntity.isAcceptPublish() || curEntity.isAcceptSubscribe()) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(), strBuff.append("The values of acceptPublish and acceptSubscribe must be false") .append(" before delete brokerId-topicName(") .append(curEntity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return result.isSuccess(); } } // delete records for (String recordKey : recordKeySet) { delConfigFromPersistent(recordKey, strBuff); delRecordFromCaches(recordKey); } result.setSuccResult(null); return result.isSuccess(); } @Override public boolean hasConfiguredTopics(int brokerId) { ConcurrentHashSet<String> keySet = brokerId2RecordCache.get(brokerId); return (keySet != null && !keySet.isEmpty()); } @Override public boolean isTopicDeployed(String topicName) { ConcurrentHashSet<String> deploySet = topicName2RecordCache.get(topicName); return (deploySet != null && !deploySet.isEmpty()); } @Override public List<TopicDeployEntity> getTopicConf(TopicDeployEntity qryEntity) { List<TopicDeployEntity> retEntities = new ArrayList<>(); if (qryEntity == null) { retEntities.addAll(topicDeployCache.values()); } else { for (TopicDeployEntity entity : topicDeployCache.values()) { if (entity != null && entity.isMatched(qryEntity, true)) { retEntities.add(entity); } } } return retEntities; } @Override public TopicDeployEntity getTopicConf(int brokerId, String topicName) { String recordKey = KeyBuilderUtils.buildTopicConfRecKey(brokerId, topicName); return topicDeployCache.get(recordKey); } @Override public TopicDeployEntity getTopicConfByeRecKey(String recordKey) { return topicDeployCache.get(recordKey); } @Override public Map<String, List<TopicDeployEntity>> getTopicConfMap(Set<String> topicNameSet, Set<Integer> 
brokerIdSet, TopicDeployEntity qryEntity) { List<TopicDeployEntity> items; Map<String, List<TopicDeployEntity>> retEntityMap = new HashMap<>(); // get matched keys by topicNameSet and brokerIdSet Set<String> matchedKeySet = getMatchedRecords(topicNameSet, brokerIdSet); // filter record by qryEntity if (matchedKeySet == null) { for (TopicDeployEntity entry : topicDeployCache.values()) { if (entry == null || (qryEntity != null && !entry.isMatched(qryEntity, true))) { continue; } items = retEntityMap.computeIfAbsent( entry.getTopicName(), k -> new ArrayList<>()); items.add(entry); } } else { TopicDeployEntity entry; for (String recKey : matchedKeySet) { entry = topicDeployCache.get(recKey); if (entry == null || (qryEntity != null && !entry.isMatched(qryEntity, true))) { continue; } items = retEntityMap.computeIfAbsent( entry.getTopicName(), k -> new ArrayList<>()); items.add(entry); } } return retEntityMap; } @Override public Map<Integer, List<TopicDeployEntity>> getTopicDeployInfoMap(Set<String> topicNameSet, Set<Integer> brokerIdSet) { List<TopicDeployEntity> items; Map<Integer, List<TopicDeployEntity>> retEntityMap = new HashMap<>(); if (brokerIdSet != null) { for (Integer brokerId : brokerIdSet) { retEntityMap.put(brokerId, new ArrayList<>()); } } // get matched keys by topicNameSet and brokerIdSet Set<String> matchedKeySet = getMatchedRecords(topicNameSet, brokerIdSet); // get record by keys if (matchedKeySet == null) { matchedKeySet = new HashSet<>(topicDeployCache.keySet()); } for (String recordKey : matchedKeySet) { TopicDeployEntity entity = topicDeployCache.get(recordKey); if (entity == null) { continue; } items = retEntityMap.computeIfAbsent( entity.getBrokerId(), k -> new ArrayList<>()); items.add(entity); } return retEntityMap; } @Override public Map<String, List<TopicDeployEntity>> getTopicConfMapByTopicAndBrokerIds(Set<String> topicSet, Set<Integer> brokerIdSet) { TopicDeployEntity tmpEntity; List<TopicDeployEntity> itemLst; Map<String, 
List<TopicDeployEntity>> retEntityMap = new HashMap<>(); // get matched keys by topicNameSet and brokerIdSet Set<String> matchedKeySet = getMatchedRecords(topicSet, brokerIdSet); // get records by matched keys if (matchedKeySet == null) { for (TopicDeployEntity entity : topicDeployCache.values()) { if (entity == null) { continue; } itemLst = retEntityMap.computeIfAbsent( entity.getTopicName(), k -> new ArrayList<>()); itemLst.add(entity); } } else { for (String key : matchedKeySet) { tmpEntity = topicDeployCache.get(key); if (tmpEntity == null) { continue; } itemLst = retEntityMap.computeIfAbsent( tmpEntity.getTopicName(), k -> new ArrayList<>()); itemLst.add(tmpEntity); } } return retEntityMap; } @Override public Map<String, TopicDeployEntity> getConfiguredTopicInfo(int brokerId) { TopicDeployEntity tmpEntity; Map<String, TopicDeployEntity> retEntityMap = new HashMap<>(); ConcurrentHashSet<String> records = brokerId2RecordCache.get(brokerId); if (records == null || records.isEmpty()) { return retEntityMap; } for (String key : records) { tmpEntity = topicDeployCache.get(key); if (tmpEntity == null) { continue; } retEntityMap.put(tmpEntity.getTopicName(), tmpEntity); } return retEntityMap; } @Override public Map<Integer, Set<String>> getConfiguredTopicInfo(Set<Integer> brokerIdSet) { Set<String> topicSet; ConcurrentHashSet<String> deploySet; Map<Integer, Set<String>> retEntityMap = new HashMap<>(); if (brokerIdSet == null || brokerIdSet.isEmpty()) { for (Map.Entry<Integer, ConcurrentHashSet<String>> entry : brokerId2TopicNameCache.entrySet()) { if (entry.getKey() == null) { continue; } topicSet = new HashSet<>(); if (entry.getValue() != null) { topicSet.addAll(entry.getValue()); } retEntityMap.put(entry.getKey(), topicSet); } } else { for (Integer brokerId : brokerIdSet) { if (brokerId == null) { continue; } topicSet = new HashSet<>(); deploySet = brokerId2TopicNameCache.get(brokerId); if (deploySet != null) { topicSet.addAll(deploySet); } retEntityMap.put(brokerId, 
topicSet);
        }
    }
    return retEntityMap;
}

/**
 * Gets the deployed broker information of each queried topic, as a map
 * from topic name to (broker id -> broker ip).
 *
 * @param topicNameSet the topic names to query; null or empty means all topics
 * @return the matched broker information grouped by topic name
 */
@Override
public Map<String, Map<Integer, String>> getTopicBrokerInfo(Set<String> topicNameSet) {
    ConcurrentHashSet<String> keySet;
    Map<Integer, String> brokerInfoMap;
    Map<String, Map<Integer, String>> retEntityMap = new HashMap<>();
    if (topicNameSet == null || topicNameSet.isEmpty()) {
        // no filter: scan every deploy record
        for (TopicDeployEntity entry : topicDeployCache.values()) {
            if (entry == null) {
                continue;
            }
            brokerInfoMap = retEntityMap.computeIfAbsent(
                    entry.getTopicName(), k -> new HashMap<>());
            brokerInfoMap.put(entry.getBrokerId(), entry.getBrokerIp());
        }
    } else {
        for (String topicName : topicNameSet) {
            if (topicName == null) {
                continue;
            }
            // every queried topic gets an entry, possibly an empty map
            brokerInfoMap = retEntityMap.computeIfAbsent(topicName, k -> new HashMap<>());
            keySet = topicName2RecordCache.get(topicName);
            if (keySet != null) {
                for (String key : keySet) {
                    TopicDeployEntity entry = topicDeployCache.get(key);
                    if (entry != null) {
                        brokerInfoMap.put(entry.getBrokerId(), entry.getBrokerIp());
                    }
                }
            }
        }
    }
    return retEntityMap;
}

/**
 * Gets the ids of the brokers on which any of the queried topics is deployed.
 *
 * @param topicNameSet the topic names to query
 * @return the matched broker ids; empty when the input is null or empty
 */
@Override
public Set<Integer> getDeployedBrokerIdByTopic(Set<String> topicNameSet) {
    ConcurrentHashSet<String> keySet;
    Set<Integer> retSet = new HashSet<>();
    if (topicNameSet == null || topicNameSet.isEmpty()) {
        return retSet;
    }
    for (String topicName : topicNameSet) {
        if (topicName == null) {
            continue;
        }
        keySet = topicName2RecordCache.get(topicName);
        if (keySet != null) {
            for (String key : keySet) {
                TopicDeployEntity entry = topicDeployCache.get(key);
                if (entry != null) {
                    retSet.add(entry.getBrokerId());
                }
            }
        }
    }
    return retSet;
}

/**
 * Gets a snapshot of all topic names that currently have deploy records.
 */
@Override
public Set<String> getDeployedTopicSet() {
    return new HashSet<>(topicName2RecordCache.keySet());
}

/**
 * Clear cached data: the record cache and all three index caches.
 */
protected void clearCachedData() {
    topicName2RecordCache.clear();
    brokerId2RecordCache.clear();
    brokerId2TopicNameCache.clear();
    topicDeployCache.clear();
}

/**
 * Add or update a record in the record cache and its index caches.
 *
 * @param entity need added or updated entity
 */
protected void putRecord2Caches(TopicDeployEntity entity) {
topicDeployCache.put(entity.getRecordKey(), entity);
// add topic index map
// get-then-putIfAbsent: if another thread installed a set concurrently,
// putIfAbsent returns that existing set and we reuse it
ConcurrentHashSet<String> keySet =
        topicName2RecordCache.get(entity.getTopicName());
if (keySet == null) {
    ConcurrentHashSet<String> tmpSet = new ConcurrentHashSet<>();
    keySet = topicName2RecordCache.putIfAbsent(entity.getTopicName(), tmpSet);
    if (keySet == null) {
        keySet = tmpSet;
    }
}
keySet.add(entity.getRecordKey());
// add brokerId index map (same get-then-putIfAbsent pattern)
keySet = brokerId2RecordCache.get(entity.getBrokerId());
if (keySet == null) {
    ConcurrentHashSet<String> tmpSet = new ConcurrentHashSet<>();
    keySet = brokerId2RecordCache.putIfAbsent(entity.getBrokerId(), tmpSet);
    if (keySet == null) {
        keySet = tmpSet;
    }
}
keySet.add(entity.getRecordKey());
// add brokerId topic map (broker id -> deployed topic names)
keySet = brokerId2TopicNameCache.get(entity.getBrokerId());
if (keySet == null) {
    ConcurrentHashSet<String> tmpSet = new ConcurrentHashSet<>();
    keySet = brokerId2TopicNameCache.putIfAbsent(entity.getBrokerId(), tmpSet);
    if (keySet == null) {
        keySet = tmpSet;
    }
}
keySet.add(entity.getTopicName());
}

/**
 * Put topic deploy configure information into persistent store
 *
 * @param entity  need add record
 * @param strBuff the string buffer
 * @param result  process result with old value
 * @return the process result
 */
protected abstract boolean putConfig2Persistent(TopicDeployEntity entity,
                                                StringBuilder strBuff, ProcessResult result);

/**
 * Delete topic deploy configure information from persistent storage
 *
 * @param recordKey the record key
 * @param strBuff   the string buffer
 * @return the process result
 */
protected abstract boolean delConfigFromPersistent(String recordKey, StringBuilder strBuff);

/**
 * Removes a deploy record and its entries in all three index caches.
 *
 * @param recordKey the key of the record to remove
 */
private void delRecordFromCaches(String recordKey) {
    TopicDeployEntity curEntity = topicDeployCache.remove(recordKey);
    if (curEntity == null) {
        // nothing cached under this key; indexes have nothing to clean up
        return;
    }
    // remove from topic index
    ConcurrentHashSet<String> keySet =
            topicName2RecordCache.get(curEntity.getTopicName());
    if (keySet != null) {
        keySet.remove(recordKey);
        if (keySet.isEmpty()) {
topicName2RecordCache.remove(curEntity.getTopicName(),
        // conditional remove: drops the index entry only while it is still
        // mapped to an empty set; NOTE(review): relies on ConcurrentHashSet
        // equality comparing contents — confirm against its implementation
        new ConcurrentHashSet<>());
    }
}
// delete brokerId index
keySet = brokerId2RecordCache.get(curEntity.getBrokerId());
if (keySet != null) {
    keySet.remove(recordKey);
    if (keySet.isEmpty()) {
        brokerId2RecordCache.remove(curEntity.getBrokerId(), new ConcurrentHashSet<>());
    }
}
// delete broker topic map
keySet = brokerId2TopicNameCache.get(curEntity.getBrokerId());
if (keySet != null) {
    keySet.remove(curEntity.getTopicName());
    if (keySet.isEmpty()) {
        brokerId2TopicNameCache.remove(curEntity.getBrokerId(), new ConcurrentHashSet<>());
    }
}
}

/**
 * Collects the record keys that match the given filters.
 *
 * @param topicNameSet topic-name filter; null or empty means "no constraint"
 * @param brokerIdSet  broker-id filter; null or empty means "no constraint"
 * @return null when neither filter was supplied (caller must scan all records);
 *         an empty set when a supplied filter matched nothing;
 *         otherwise the intersection of the keys matched by each filter
 */
private Set<String> getMatchedRecords(Set<String> topicNameSet, Set<Integer> brokerIdSet) {
    ConcurrentHashSet<String> keySet;
    Set<String> topicKeySet = null;
    Set<String> brokerKeySet = null;
    Set<String> matchedKeySet = null;
    // get deploy records set by topicName
    if (topicNameSet != null && !topicNameSet.isEmpty()) {
        topicKeySet = new HashSet<>();
        for (String topicName : topicNameSet) {
            keySet = topicName2RecordCache.get(topicName);
            if (keySet != null && !keySet.isEmpty()) {
                topicKeySet.addAll(keySet);
            }
        }
        if (topicKeySet.isEmpty()) {
            // filter supplied but nothing matched: short-circuit
            return Collections.emptySet();
        }
    }
    // get deploy records set by brokerId
    if (brokerIdSet != null && !brokerIdSet.isEmpty()) {
        brokerKeySet = new HashSet<>();
        for (Integer brokerId : brokerIdSet) {
            keySet = brokerId2RecordCache.get(brokerId);
            if (keySet != null && !keySet.isEmpty()) {
                brokerKeySet.addAll(keySet);
            }
        }
        if (brokerKeySet.isEmpty()) {
            return Collections.emptySet();
        }
    }
    // get intersection from topicKeySet and brokerKeySet
    if (topicKeySet != null || brokerKeySet != null) {
        if (topicKeySet == null) {
            matchedKeySet = new HashSet<>(brokerKeySet);
        } else {
            if (brokerKeySet == null) {
                matchedKeySet = new HashSet<>(topicKeySet);
            } else {
                matchedKeySet = new HashSet<>();
                for (String record : topicKeySet) {
                    if (brokerKeySet.contains(record)) {
                        matchedKeySet.add(record);
                    }
                }
            }
        }
    }
    return matchedKeySet;
}

/**
 * Check whether the change of deploy
values is valid
 * Attention, the newEntity and curEntity must not equal
 *
 * @param newEntity the entity to be updated
 * @param curEntity the current entity
 * @param strBuff   string buffer
 * @param result    check result of parameter value
 * @return true for valid, false for invalid
 */
private boolean isValidValuesChange(TopicDeployEntity newEntity,
                                    TopicDeployEntity curEntity,
                                    StringBuilder strBuff, ProcessResult result) {
    // check if shrink data store block
    if (newEntity.getNumPartitions() != TBaseConstants.META_VALUE_UNDEFINED
            && newEntity.getNumPartitions() < curEntity.getNumPartitions()) {
        // NOTE(review): message literal lacks a space before "in brokerId-topicName("
        result.setFailResult(DataOpErrCode.DERR_ILLEGAL_VALUE.getCode(),
                strBuff.append("Partition number less than before,")
                        .append(" new value is ").append(newEntity.getNumPartitions())
                        .append(", current value is ").append(curEntity.getNumPartitions())
                        .append("in brokerId-topicName(").append(curEntity.getRecordKey())
                        .append(") record!").toString());
        strBuff.delete(0, strBuff.length());
        return result.isSuccess();
    }
    if (newEntity.getNumTopicStores() != TBaseConstants.META_VALUE_UNDEFINED
            && newEntity.getNumTopicStores() < curEntity.getNumTopicStores()) {
        result.setFailResult(DataOpErrCode.DERR_ILLEGAL_VALUE.getCode(),
                strBuff.append("TopicStores number less than before,")
                        .append(" new value is ").append(newEntity.getNumTopicStores())
                        .append(", current value is ").append(curEntity.getNumTopicStores())
                        .append("in brokerId-topicName(").append(curEntity.getRecordKey())
                        .append(") record!").toString());
        strBuff.delete(0, strBuff.length());
        return result.isSuccess();
    }
    // check whether the deploy status is equal
    if (newEntity.getTopicStatus() == curEntity.getTopicStatus()) {
        if (!newEntity.isValidTopicStatus()) {
            // unchanged but already soft-deleted: only resume or hard removal allowed
            result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(),
                    strBuff.append("Softly deleted record cannot be changed,")
                            .append(" please resume or hard remove for brokerId-topicName(")
                            .append(newEntity.getRecordKey()).append(") record!").toString());
            strBuff.delete(0,
strBuff.length());
            return result.isSuccess();
        }
        return true;
    }
    // check deploy status case from valid to invalid
    if (curEntity.isValidTopicStatus() && !newEntity.isValidTopicStatus()) {
        // publish/subscribe must be turned off before a status transition
        if (curEntity.isAcceptPublish() || curEntity.isAcceptSubscribe()) {
            result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(),
                    strBuff.append("The values of acceptPublish and acceptSubscribe must be false")
                            .append(" before change status of brokerId-topicName(")
                            .append(curEntity.getRecordKey()).append(") record!").toString());
            strBuff.delete(0, strBuff.length());
            return result.isSuccess();
        }
        // a valid record must be soft-deleted first; no jumping straight to removal
        if (newEntity.getTopicStatus().getCode()
                > TopicStatus.STATUS_TOPIC_SOFT_DELETE.getCode()) {
            result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(),
                    strBuff.append("Please softly deleted the brokerId-topicName(")
                            .append(newEntity.getRecordKey()).append(") record first!").toString());
            strBuff.delete(0, strBuff.length());
            return result.isSuccess();
        }
        return true;
    }
    // check deploy status case from invalid to invalid
    if (!curEntity.isValidTopicStatus() && !newEntity.isValidTopicStatus()) {
        // only soft-delete -> soft-remove and soft-remove -> hard-remove are allowed
        if (!((curEntity.getTopicStatus() == TopicStatus.STATUS_TOPIC_SOFT_DELETE
                && newEntity.getTopicStatus() == TopicStatus.STATUS_TOPIC_SOFT_REMOVE)
                || (curEntity.getTopicStatus() == TopicStatus.STATUS_TOPIC_SOFT_REMOVE
                && newEntity.getTopicStatus() == TopicStatus.STATUS_TOPIC_HARD_REMOVE))) {
            result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(),
                    strBuff.append("Illegal transfer status from ")
                            .append(curEntity.getTopicStatus().getDescription())
                            .append(" to ").append(newEntity.getTopicStatus().getDescription())
                            .append(" for the brokerId-topicName(")
                            .append(newEntity.getRecordKey()).append(") record!").toString());
            strBuff.delete(0, strBuff.length());
            return result.isSuccess();
        }
        if (newEntity.isAcceptPublish()
                || newEntity.isAcceptSubscribe()) {
            result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(),
                    strBuff.append("The values of acceptPublish and acceptSubscribe must be
false") .append(" before change status of brokerId-topicName(") .append(newEntity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return result.isSuccess(); } return true; } // check deploy status case from invalid to valid if (!curEntity.isValidTopicStatus() && newEntity.isValidTopicStatus()) { if (curEntity.getTopicStatus() != TopicStatus.STATUS_TOPIC_SOFT_DELETE) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(), strBuff.append("Illegal transfer status from ") .append(curEntity.getTopicStatus().getDescription()) .append(" to ").append(newEntity.getTopicStatus().getDescription()) .append(" for the brokerId-topicName(") .append(newEntity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return !result.isSuccess(); } if (newEntity.isAcceptPublish() || newEntity.isAcceptSubscribe()) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_STATUS.getCode(), strBuff.append("The values of acceptPublish and acceptSubscribe must be false") .append(" before change status of brokerId-topicName(") .append(newEntity.getRecordKey()).append(") record!").toString()); strBuff.delete(0, strBuff.length()); return !result.isSuccess(); } return false; } return false; } /** * Verify the validity of the configuration value for the system topic * * @param deployEntity the topic configuration that needs to be added or updated * @param strBuff the print info string buffer * @param result the process result return * @return true if valid otherwise false */ private boolean isValidSysTopicConf(TopicDeployEntity deployEntity, StringBuilder strBuff, ProcessResult result) { if (!TServerConstants.OFFSET_HISTORY_NAME.equals(deployEntity.getTopicName())) { return true; } if (deployEntity.getNumTopicStores() != TServerConstants.OFFSET_HISTORY_NUMSTORES) { result.setFailResult(DataOpErrCode.DERR_ILLEGAL_VALUE.getCode(), strBuff.append("For system topic") .append(TServerConstants.OFFSET_HISTORY_NAME) .append(", the 
TopicStores value(")
                        .append(TServerConstants.OFFSET_HISTORY_NUMSTORES)
                        .append(") cannot be changed!").toString());
        strBuff.delete(0, strBuff.length());
        return result.isSuccess();
    }
    // partition count of the system topic is likewise pinned to its constant
    if (deployEntity.getNumPartitions() != TServerConstants.OFFSET_HISTORY_NUMPARTS) {
        result.setFailResult(DataOpErrCode.DERR_ILLEGAL_VALUE.getCode(),
                strBuff.append("For system topic")
                        .append(TServerConstants.OFFSET_HISTORY_NAME)
                        .append(", the Partition value(")
                        .append(TServerConstants.OFFSET_HISTORY_NUMPARTS)
                        .append(") cannot be changed!").toString());
        strBuff.delete(0, strBuff.length());
        return result.isSuccess();
    }
    return true;
}
}
googleapis/google-cloud-java
35,077
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/BargeInConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/audio_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * Configuration of the barge-in behavior. Barge-in instructs the API to return * a detected utterance at a proper time while the client is playing back the * response audio from a previous request. When the client sees the * utterance, it should stop the playback and immediately get ready for * receiving the responses for the current request. * * The barge-in handling requires the client to start streaming audio input * as soon as it starts playing back the audio from the previous response. The * playback is modeled into two phases: * * * No barge-in phase: which goes first and during which speech detection * should not be carried out. * * * Barge-in phase: which follows the no barge-in phase and during which * the API starts speech detection and may inform the client that an utterance * has been detected. Note that no-speech event is not expected in this * phase. * * The client provides this configuration in terms of the durations of those * two phases. The durations are measured in terms of the audio length from the * start of the input audio. * * No-speech event is a response with END_OF_UTTERANCE without any transcript * following up. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.BargeInConfig} */ public final class BargeInConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.BargeInConfig) BargeInConfigOrBuilder { private static final long serialVersionUID = 0L; // Use BargeInConfig.newBuilder() to construct. private BargeInConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BargeInConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BargeInConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.AudioConfigProto .internal_static_google_cloud_dialogflow_cx_v3_BargeInConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.AudioConfigProto .internal_static_google_cloud_dialogflow_cx_v3_BargeInConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.BargeInConfig.class, com.google.cloud.dialogflow.cx.v3.BargeInConfig.Builder.class); } private int bitField0_; public static final int NO_BARGE_IN_DURATION_FIELD_NUMBER = 1; private com.google.protobuf.Duration noBargeInDuration_; /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> * * @return Whether the noBargeInDuration field is set. */ @java.lang.Override public boolean hasNoBargeInDuration() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> * * @return The noBargeInDuration. 
*/ @java.lang.Override public com.google.protobuf.Duration getNoBargeInDuration() { return noBargeInDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : noBargeInDuration_; } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() { return noBargeInDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : noBargeInDuration_; } public static final int TOTAL_DURATION_FIELD_NUMBER = 2; private com.google.protobuf.Duration totalDuration_; /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> * * @return Whether the totalDuration field is set. */ @java.lang.Override public boolean hasTotalDuration() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> * * @return The totalDuration. */ @java.lang.Override public com.google.protobuf.Duration getTotalDuration() { return totalDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : totalDuration_; } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() { return totalDuration_ == null ? 
com.google.protobuf.Duration.getDefaultInstance() : totalDuration_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNoBargeInDuration()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getTotalDuration()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNoBargeInDuration()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTotalDuration()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.BargeInConfig)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.BargeInConfig other = (com.google.cloud.dialogflow.cx.v3.BargeInConfig) obj; if (hasNoBargeInDuration() != other.hasNoBargeInDuration()) return false; if (hasNoBargeInDuration()) { if (!getNoBargeInDuration().equals(other.getNoBargeInDuration())) return false; } if (hasTotalDuration() != other.hasTotalDuration()) return false; if (hasTotalDuration()) { if (!getTotalDuration().equals(other.getTotalDuration())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; 
hash = (19 * hash) + getDescriptor().hashCode(); if (hasNoBargeInDuration()) { hash = (37 * hash) + NO_BARGE_IN_DURATION_FIELD_NUMBER; hash = (53 * hash) + getNoBargeInDuration().hashCode(); } if (hasTotalDuration()) { hash = (37 * hash) + TOTAL_DURATION_FIELD_NUMBER; hash = (53 * hash) + getTotalDuration().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3.BargeInConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Configuration of the barge-in behavior. Barge-in instructs the API to return * a detected utterance at a proper time while the client is playing back the * response audio from a previous request. When the client sees the * utterance, it should stop the playback and immediately get ready for * receiving the responses for the current request. * * The barge-in handling requires the client to start streaming audio input * as soon as it starts playing back the audio from the previous response. The * playback is modeled into two phases: * * * No barge-in phase: which goes first and during which speech detection * should not be carried out. * * * Barge-in phase: which follows the no barge-in phase and during which * the API starts speech detection and may inform the client that an utterance * has been detected. Note that no-speech event is not expected in this * phase. * * The client provides this configuration in terms of the durations of those * two phases. The durations are measured in terms of the audio length from the * start of the input audio. * * No-speech event is a response with END_OF_UTTERANCE without any transcript * following up. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.BargeInConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.BargeInConfig) com.google.cloud.dialogflow.cx.v3.BargeInConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.AudioConfigProto .internal_static_google_cloud_dialogflow_cx_v3_BargeInConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.AudioConfigProto .internal_static_google_cloud_dialogflow_cx_v3_BargeInConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.BargeInConfig.class, com.google.cloud.dialogflow.cx.v3.BargeInConfig.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.BargeInConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNoBargeInDurationFieldBuilder(); getTotalDurationFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; noBargeInDuration_ = null; if (noBargeInDurationBuilder_ != null) { noBargeInDurationBuilder_.dispose(); noBargeInDurationBuilder_ = null; } totalDuration_ = null; if (totalDurationBuilder_ != null) { totalDurationBuilder_.dispose(); totalDurationBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.AudioConfigProto 
.internal_static_google_cloud_dialogflow_cx_v3_BargeInConfig_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.BargeInConfig getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.BargeInConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.BargeInConfig build() { com.google.cloud.dialogflow.cx.v3.BargeInConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.BargeInConfig buildPartial() { com.google.cloud.dialogflow.cx.v3.BargeInConfig result = new com.google.cloud.dialogflow.cx.v3.BargeInConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.cx.v3.BargeInConfig result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.noBargeInDuration_ = noBargeInDurationBuilder_ == null ? noBargeInDuration_ : noBargeInDurationBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.totalDuration_ = totalDurationBuilder_ == null ? 
totalDuration_ : totalDurationBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.BargeInConfig) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.BargeInConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.BargeInConfig other) { if (other == com.google.cloud.dialogflow.cx.v3.BargeInConfig.getDefaultInstance()) return this; if (other.hasNoBargeInDuration()) { mergeNoBargeInDuration(other.getNoBargeInDuration()); } if (other.hasTotalDuration()) { mergeTotalDuration(other.getTotalDuration()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { 
throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNoBargeInDurationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getTotalDurationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Duration noBargeInDuration_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> noBargeInDurationBuilder_; /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> * * @return Whether the noBargeInDuration field is set. */ public boolean hasNoBargeInDuration() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> * * @return The noBargeInDuration. */ public com.google.protobuf.Duration getNoBargeInDuration() { if (noBargeInDurationBuilder_ == null) { return noBargeInDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : noBargeInDuration_; } else { return noBargeInDurationBuilder_.getMessage(); } } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. 
* </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public Builder setNoBargeInDuration(com.google.protobuf.Duration value) { if (noBargeInDurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } noBargeInDuration_ = value; } else { noBargeInDurationBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public Builder setNoBargeInDuration(com.google.protobuf.Duration.Builder builderForValue) { if (noBargeInDurationBuilder_ == null) { noBargeInDuration_ = builderForValue.build(); } else { noBargeInDurationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public Builder mergeNoBargeInDuration(com.google.protobuf.Duration value) { if (noBargeInDurationBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && noBargeInDuration_ != null && noBargeInDuration_ != com.google.protobuf.Duration.getDefaultInstance()) { getNoBargeInDurationBuilder().mergeFrom(value); } else { noBargeInDuration_ = value; } } else { noBargeInDurationBuilder_.mergeFrom(value); } if (noBargeInDuration_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. 
* </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public Builder clearNoBargeInDuration() { bitField0_ = (bitField0_ & ~0x00000001); noBargeInDuration_ = null; if (noBargeInDurationBuilder_ != null) { noBargeInDurationBuilder_.dispose(); noBargeInDurationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public com.google.protobuf.Duration.Builder getNoBargeInDurationBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNoBargeInDurationFieldBuilder().getBuilder(); } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. * </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() { if (noBargeInDurationBuilder_ != null) { return noBargeInDurationBuilder_.getMessageOrBuilder(); } else { return noBargeInDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : noBargeInDuration_; } } /** * * * <pre> * Duration that is not eligible for barge-in at the beginning of the input * audio. 
* </pre> * * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getNoBargeInDurationFieldBuilder() { if (noBargeInDurationBuilder_ == null) { noBargeInDurationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getNoBargeInDuration(), getParentForChildren(), isClean()); noBargeInDuration_ = null; } return noBargeInDurationBuilder_; } private com.google.protobuf.Duration totalDuration_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> totalDurationBuilder_; /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> * * @return Whether the totalDuration field is set. */ public boolean hasTotalDuration() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> * * @return The totalDuration. */ public com.google.protobuf.Duration getTotalDuration() { if (totalDurationBuilder_ == null) { return totalDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : totalDuration_; } else { return totalDurationBuilder_.getMessage(); } } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. 
* </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public Builder setTotalDuration(com.google.protobuf.Duration value) { if (totalDurationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } totalDuration_ = value; } else { totalDurationBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public Builder setTotalDuration(com.google.protobuf.Duration.Builder builderForValue) { if (totalDurationBuilder_ == null) { totalDuration_ = builderForValue.build(); } else { totalDurationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public Builder mergeTotalDuration(com.google.protobuf.Duration value) { if (totalDurationBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && totalDuration_ != null && totalDuration_ != com.google.protobuf.Duration.getDefaultInstance()) { getTotalDurationBuilder().mergeFrom(value); } else { totalDuration_ = value; } } else { totalDurationBuilder_.mergeFrom(value); } if (totalDuration_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public Builder clearTotalDuration() { bitField0_ = (bitField0_ & ~0x00000002); totalDuration_ = null; if (totalDurationBuilder_ != null) { totalDurationBuilder_.dispose(); totalDurationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. 
* </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public com.google.protobuf.Duration.Builder getTotalDurationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTotalDurationFieldBuilder().getBuilder(); } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() { if (totalDurationBuilder_ != null) { return totalDurationBuilder_.getMessageOrBuilder(); } else { return totalDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : totalDuration_; } } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getTotalDurationFieldBuilder() { if (totalDurationBuilder_ == null) { totalDurationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getTotalDuration(), getParentForChildren(), isClean()); totalDuration_ = null; } return totalDurationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.BargeInConfig) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.BargeInConfig) private static final com.google.cloud.dialogflow.cx.v3.BargeInConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.cloud.dialogflow.cx.v3.BargeInConfig(); } public static com.google.cloud.dialogflow.cx.v3.BargeInConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BargeInConfig> PARSER = new com.google.protobuf.AbstractParser<BargeInConfig>() { @java.lang.Override public BargeInConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BargeInConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BargeInConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.BargeInConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,047
java-errorreporting/proto-google-cloud-error-reporting-v1beta1/src/main/java/com/google/devtools/clouderrorreporting/v1beta1/ServiceContextFilter.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto // Protobuf Java Version: 3.25.8 package com.google.devtools.clouderrorreporting.v1beta1; /** * * * <pre> * Specifies criteria for filtering a subset of service contexts. * The fields in the filter correspond to the fields in `ServiceContext`. * Only exact, case-sensitive matches are supported. * If a field is unset or empty, it matches arbitrary values. * </pre> * * Protobuf type {@code google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter} */ public final class ServiceContextFilter extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) ServiceContextFilterOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceContextFilter.newBuilder() to construct. 
private ServiceContextFilter(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceContextFilter() { service_ = ""; version_ = ""; resourceType_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ServiceContextFilter(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.clouderrorreporting.v1beta1.ErrorStatsServiceProto .internal_static_google_devtools_clouderrorreporting_v1beta1_ServiceContextFilter_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.clouderrorreporting.v1beta1.ErrorStatsServiceProto .internal_static_google_devtools_clouderrorreporting_v1beta1_ServiceContextFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.class, com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.Builder.class); } public static final int SERVICE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object service_ = ""; /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The service. */ @java.lang.Override public java.lang.String getService() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). 
* </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for service. */ @java.lang.Override public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VERSION_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object version_ = ""; /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The version. */ @java.lang.Override public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for version. */ @java.lang.Override public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_TYPE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object resourceType_ = ""; /** * * * <pre> * Optional. 
The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The resourceType. */ @java.lang.Override public java.lang.String getResourceType() { java.lang.Object ref = resourceType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceType_ = s; return s; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for resourceType. */ @java.lang.Override public com.google.protobuf.ByteString getResourceTypeBytes() { java.lang.Object ref = resourceType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, service_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, version_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, resourceType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, service_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, version_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, resourceType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter)) { return super.equals(obj); } com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter other = (com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) obj; if (!getService().equals(other.getService())) return false; if (!getVersion().equals(other.getVersion())) return false; if (!getResourceType().equals(other.getResourceType())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getService().hashCode(); hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getResourceType().hashCode(); hash = (29 * hash) + 
getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specifies criteria for filtering a subset of service contexts. * The fields in the filter correspond to the fields in `ServiceContext`. * Only exact, case-sensitive matches are supported. * If a field is unset or empty, it matches arbitrary values. * </pre> * * Protobuf type {@code google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.devtools.clouderrorreporting.v1beta1.ErrorStatsServiceProto .internal_static_google_devtools_clouderrorreporting_v1beta1_ServiceContextFilter_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.devtools.clouderrorreporting.v1beta1.ErrorStatsServiceProto .internal_static_google_devtools_clouderrorreporting_v1beta1_ServiceContextFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.class, com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.Builder.class); } // Construct using // com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; service_ = ""; version_ = ""; resourceType_ = ""; 
return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.devtools.clouderrorreporting.v1beta1.ErrorStatsServiceProto .internal_static_google_devtools_clouderrorreporting_v1beta1_ServiceContextFilter_descriptor; } @java.lang.Override public com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter getDefaultInstanceForType() { return com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter .getDefaultInstance(); } @java.lang.Override public com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter build() { com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter buildPartial() { com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter result = new com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.service_ = service_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.version_ = version_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.resourceType_ = resourceType_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor 
oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) { return mergeFrom( (com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter other) { if (other == com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter .getDefaultInstance()) return this; if (!other.getService().isEmpty()) { service_ = other.service_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getResourceType().isEmpty()) { resourceType_ = other.resourceType_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { service_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 18 case 26: { version_ = input.readStringRequireUtf8(); bitField0_ |= 
0x00000002; break; } // case 26 case 34: { resourceType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object service_ = ""; /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The service. */ public java.lang.String getService() { java.lang.Object ref = service_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for service. */ public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The service to set. 
* @return This builder for chaining. */ public Builder setService(java.lang.String value) { if (value == null) { throw new NullPointerException(); } service_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearService() { service_ = getDefaultInstance().getService(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). * </pre> * * <code>string service = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for service to set. * @return This builder for chaining. */ public Builder setServiceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); service_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object version_ = ""; /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The version. */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. 
The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for version. */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). * </pre> * * <code>string version = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for version to set. * @return This builder for chaining. 
*/ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object resourceType_ = ""; /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The resourceType. */ public java.lang.String getResourceType() { java.lang.Object ref = resourceType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for resourceType. */ public com.google.protobuf.ByteString getResourceTypeBytes() { java.lang.Object ref = resourceType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The resourceType to set. * @return This builder for chaining. 
*/ public Builder setResourceType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearResourceType() { resourceType_ = getDefaultInstance().getResourceType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. The exact value to match against * [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). * </pre> * * <code>string resource_type = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for resourceType to set. * @return This builder for chaining. 
*/ public Builder setResourceTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) } // @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter) private static final com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter(); } public static com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceContextFilter> PARSER = new com.google.protobuf.AbstractParser<ServiceContextFilter>() { @java.lang.Override public ServiceContextFilter parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceContextFilter> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceContextFilter> getParserForType() { return PARSER; } @java.lang.Override public com.google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,267
java-policysimulator/google-cloud-policysimulator/src/main/java/com/google/cloud/policysimulator/v1/SimulatorClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.policysimulator.v1; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.httpjson.longrunning.OperationsClient; import com.google.api.gax.longrunning.OperationFuture; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.policysimulator.v1.stub.SimulatorStub; import com.google.cloud.policysimulator.v1.stub.SimulatorStubSettings; import com.google.common.util.concurrent.MoreExecutors; import com.google.longrunning.Operation; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Policy Simulator API service. * * <p>Policy Simulator is a collection of endpoints for creating, running, and viewing a * [Replay][google.cloud.policysimulator.v1.Replay]. A * [Replay][google.cloud.policysimulator.v1.Replay] is a type of simulation that lets you see how * your principals' access to resources might change if you changed your IAM policy. 
* * <p>During a [Replay][google.cloud.policysimulator.v1.Replay], Policy Simulator re-evaluates, or * replays, past access attempts under both the current policy and your proposed policy, and * compares those results to determine how your principals' access might change under the proposed * policy. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ReplayName name = * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]"); * Replay response = simulatorClient.getReplay(name); * } * }</pre> * * <p>Note: close() needs to be called on the SimulatorClient object to clean up resources such as * threads. In the example above, try-with-resources is used, which automatically calls close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> GetReplay</td> * <td><p> Gets the specified [Replay][google.cloud.policysimulator.v1.Replay]. 
Each `Replay` is available for at least 7 days.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> getReplay(GetReplayRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> getReplay(ReplayName name) * <li><p> getReplay(String name) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getReplayCallable() * </ul> * </td> * </tr> * <tr> * <td><p> CreateReplay</td> * <td><p> Creates and starts a [Replay][google.cloud.policysimulator.v1.Replay] using the given [ReplayConfig][google.cloud.policysimulator.v1.ReplayConfig].</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> createReplayAsync(CreateReplayRequest request) * </ul> * <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p> * <ul> * <li><p> createReplayAsync(String parent, Replay replay) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> createReplayOperationCallable() * <li><p> createReplayCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListReplayResults</td> * <td><p> Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay].</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listReplayResults(ListReplayResultsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields 
of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> listReplayResults(ReplayName parent) * <li><p> listReplayResults(String parent) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listReplayResultsPagedCallable() * <li><p> listReplayResultsCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of SimulatorSettings to create(). * For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * SimulatorSettings simulatorSettings = * SimulatorSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * SimulatorClient simulatorClient = SimulatorClient.create(simulatorSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * SimulatorSettings simulatorSettings = * SimulatorSettings.newBuilder().setEndpoint(myEndpoint).build(); * SimulatorClient simulatorClient = SimulatorClient.create(simulatorSettings); * }</pre> * * <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over * the wire: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * SimulatorSettings simulatorSettings = SimulatorSettings.newHttpJsonBuilder().build(); * SimulatorClient simulatorClient = SimulatorClient.create(simulatorSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @Generated("by gapic-generator-java") public class SimulatorClient implements BackgroundResource { private final SimulatorSettings settings; private final SimulatorStub stub; private final OperationsClient httpJsonOperationsClient; private final com.google.longrunning.OperationsClient operationsClient; /** Constructs an instance of SimulatorClient with default settings. */ public static final SimulatorClient create() throws IOException { return create(SimulatorSettings.newBuilder().build()); } /** * Constructs an instance of SimulatorClient, using the given settings. The channels are created * based on the settings passed in, or defaults for any settings that are not set. 
*/ public static final SimulatorClient create(SimulatorSettings settings) throws IOException { return new SimulatorClient(settings); } /** * Constructs an instance of SimulatorClient, using the given stub for making calls. This is for * advanced usage - prefer using create(SimulatorSettings). */ public static final SimulatorClient create(SimulatorStub stub) { return new SimulatorClient(stub); } /** * Constructs an instance of SimulatorClient, using the given settings. This is protected so that * it is easy to make a subclass, but otherwise, the static factory methods should be preferred. */ protected SimulatorClient(SimulatorSettings settings) throws IOException { this.settings = settings; this.stub = ((SimulatorStubSettings) settings.getStubSettings()).createStub(); this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } protected SimulatorClient(SimulatorStub stub) { this.settings = null; this.stub = stub; this.operationsClient = com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub()); this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub()); } public final SimulatorSettings getSettings() { return settings; } public SimulatorStub getStub() { return stub; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ public final com.google.longrunning.OperationsClient getOperationsClient() { return operationsClient; } /** * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ @BetaApi public final OperationsClient getHttpJsonOperationsClient() { return httpJsonOperationsClient; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the specified [Replay][google.cloud.policysimulator.v1.Replay]. 
Each `Replay` is available * for at least 7 days. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ReplayName name = * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]"); * Replay response = simulatorClient.getReplay(name); * } * }</pre> * * @param name Required. The name of the [Replay][google.cloud.policysimulator.v1.Replay] to * retrieve, in the following format: * <p>`{projects|folders|organizations}/{resource-id}/locations/global/replays/{replay-id}`, * where `{resource-id}` is the ID of the project, folder, or organization that owns the * `Replay`. * <p>Example: * `projects/my-example-project/locations/global/replays/506a5f7f-38ce-4d7d-8e03-479ce1833c36` * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Replay getReplay(ReplayName name) { GetReplayRequest request = GetReplayRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return getReplay(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the specified [Replay][google.cloud.policysimulator.v1.Replay]. Each `Replay` is available * for at least 7 days. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * String name = * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]").toString(); * Replay response = simulatorClient.getReplay(name); * } * }</pre> * * @param name Required. The name of the [Replay][google.cloud.policysimulator.v1.Replay] to * retrieve, in the following format: * <p>`{projects|folders|organizations}/{resource-id}/locations/global/replays/{replay-id}`, * where `{resource-id}` is the ID of the project, folder, or organization that owns the * `Replay`. * <p>Example: * `projects/my-example-project/locations/global/replays/506a5f7f-38ce-4d7d-8e03-479ce1833c36` * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Replay getReplay(String name) { GetReplayRequest request = GetReplayRequest.newBuilder().setName(name).build(); return getReplay(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the specified [Replay][google.cloud.policysimulator.v1.Replay]. Each `Replay` is available * for at least 7 days. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * GetReplayRequest request = * GetReplayRequest.newBuilder() * .setName( * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]") * .toString()) * .build(); * Replay response = simulatorClient.getReplay(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Replay getReplay(GetReplayRequest request) { return getReplayCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the specified [Replay][google.cloud.policysimulator.v1.Replay]. Each `Replay` is available * for at least 7 days. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * GetReplayRequest request = * GetReplayRequest.newBuilder() * .setName( * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]") * .toString()) * .build(); * ApiFuture<Replay> future = simulatorClient.getReplayCallable().futureCall(request); * // Do something. * Replay response = future.get(); * } * }</pre> */ public final UnaryCallable<GetReplayRequest, Replay> getReplayCallable() { return stub.getReplayCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. 
/** * Creates and starts a [Replay][google.cloud.policysimulator.v1.Replay] using the given * [ReplayConfig][google.cloud.policysimulator.v1.ReplayConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * String parent = "parent-995424086"; * Replay replay = Replay.newBuilder().build(); * Replay response = simulatorClient.createReplayAsync(parent, replay).get(); * } * }</pre> * * @param parent Required. The parent resource where this * [Replay][google.cloud.policysimulator.v1.Replay] will be created. This resource must be a * project, folder, or organization with a location. * <p>Example: `projects/my-example-project/locations/global` * @param replay Required. The [Replay][google.cloud.policysimulator.v1.Replay] to create. Set * `Replay.ReplayConfig` to configure the replay. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<Replay, ReplayOperationMetadata> createReplayAsync( String parent, Replay replay) { CreateReplayRequest request = CreateReplayRequest.newBuilder().setParent(parent).setReplay(replay).build(); return createReplayAsync(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates and starts a [Replay][google.cloud.policysimulator.v1.Replay] using the given * [ReplayConfig][google.cloud.policysimulator.v1.ReplayConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * CreateReplayRequest request = * CreateReplayRequest.newBuilder() * .setParent("parent-995424086") * .setReplay(Replay.newBuilder().build()) * .build(); * Replay response = simulatorClient.createReplayAsync(request).get(); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final OperationFuture<Replay, ReplayOperationMetadata> createReplayAsync( CreateReplayRequest request) { return createReplayOperationCallable().futureCall(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates and starts a [Replay][google.cloud.policysimulator.v1.Replay] using the given * [ReplayConfig][google.cloud.policysimulator.v1.ReplayConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * CreateReplayRequest request = * CreateReplayRequest.newBuilder() * .setParent("parent-995424086") * .setReplay(Replay.newBuilder().build()) * .build(); * OperationFuture<Replay, ReplayOperationMetadata> future = * simulatorClient.createReplayOperationCallable().futureCall(request); * // Do something. 
* Replay response = future.get(); * } * }</pre> */ public final OperationCallable<CreateReplayRequest, Replay, ReplayOperationMetadata> createReplayOperationCallable() { return stub.createReplayOperationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates and starts a [Replay][google.cloud.policysimulator.v1.Replay] using the given * [ReplayConfig][google.cloud.policysimulator.v1.ReplayConfig]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * CreateReplayRequest request = * CreateReplayRequest.newBuilder() * .setParent("parent-995424086") * .setReplay(Replay.newBuilder().build()) * .build(); * ApiFuture<Operation> future = simulatorClient.createReplayCallable().futureCall(request); * // Do something. * Operation response = future.get(); * } * }</pre> */ public final UnaryCallable<CreateReplayRequest, Operation> createReplayCallable() { return stub.createReplayCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ReplayName parent = * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]"); * for (ReplayResult element : simulatorClient.listReplayResults(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The [Replay][google.cloud.policysimulator.v1.Replay] whose results are * listed, in the following format: * <p>`{projects|folders|organizations}/{resource-id}/locations/global/replays/{replay-id}` * <p>Example: * `projects/my-project/locations/global/replays/506a5f7f-38ce-4d7d-8e03-479ce1833c36` * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListReplayResultsPagedResponse listReplayResults(ReplayName parent) { ListReplayResultsRequest request = ListReplayResultsRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .build(); return listReplayResults(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * String parent = * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]").toString(); * for (ReplayResult element : simulatorClient.listReplayResults(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The [Replay][google.cloud.policysimulator.v1.Replay] whose results are * listed, in the following format: * <p>`{projects|folders|organizations}/{resource-id}/locations/global/replays/{replay-id}` * <p>Example: * `projects/my-project/locations/global/replays/506a5f7f-38ce-4d7d-8e03-479ce1833c36` * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListReplayResultsPagedResponse listReplayResults(String parent) { ListReplayResultsRequest request = ListReplayResultsRequest.newBuilder().setParent(parent).build(); return listReplayResults(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ListReplayResultsRequest request = * ListReplayResultsRequest.newBuilder() * .setParent( * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]") * .toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * for (ReplayResult element : simulatorClient.listReplayResults(request).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListReplayResultsPagedResponse listReplayResults(ListReplayResultsRequest request) { return listReplayResultsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ListReplayResultsRequest request = * ListReplayResultsRequest.newBuilder() * .setParent( * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]") * .toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * ApiFuture<ReplayResult> future = * simulatorClient.listReplayResultsPagedCallable().futureCall(request); * // Do something. 
* for (ReplayResult element : future.get().iterateAll()) { * // doThingsWith(element); * } * } * }</pre> */ public final UnaryCallable<ListReplayResultsRequest, ListReplayResultsPagedResponse> listReplayResultsPagedCallable() { return stub.listReplayResultsPagedCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists the results of running a [Replay][google.cloud.policysimulator.v1.Replay]. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (SimulatorClient simulatorClient = SimulatorClient.create()) { * ListReplayResultsRequest request = * ListReplayResultsRequest.newBuilder() * .setParent( * ReplayName.ofProjectLocationReplayName("[PROJECT]", "[LOCATION]", "[REPLAY]") * .toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * while (true) { * ListReplayResultsResponse response = * simulatorClient.listReplayResultsCallable().call(request); * for (ReplayResult element : response.getReplayResultsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * }</pre> */ public final UnaryCallable<ListReplayResultsRequest, ListReplayResultsResponse> listReplayResultsCallable() { return stub.listReplayResultsCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return 
stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } public static class ListReplayResultsPagedResponse extends AbstractPagedListResponse< ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult, ListReplayResultsPage, ListReplayResultsFixedSizeCollection> { public static ApiFuture<ListReplayResultsPagedResponse> createAsync( PageContext<ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult> context, ApiFuture<ListReplayResultsResponse> futureResponse) { ApiFuture<ListReplayResultsPage> futurePage = ListReplayResultsPage.createEmptyPage().createPageAsync(context, futureResponse); return ApiFutures.transform( futurePage, input -> new ListReplayResultsPagedResponse(input), MoreExecutors.directExecutor()); } private ListReplayResultsPagedResponse(ListReplayResultsPage page) { super(page, ListReplayResultsFixedSizeCollection.createEmptyCollection()); } } public static class ListReplayResultsPage extends AbstractPage< ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult, ListReplayResultsPage> { private ListReplayResultsPage( PageContext<ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult> context, ListReplayResultsResponse response) { super(context, response); } private static ListReplayResultsPage createEmptyPage() { return new ListReplayResultsPage(null, null); } @Override protected ListReplayResultsPage createPage( PageContext<ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult> context, ListReplayResultsResponse response) { return new ListReplayResultsPage(context, response); } @Override public ApiFuture<ListReplayResultsPage> createPageAsync( PageContext<ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult> context, ApiFuture<ListReplayResultsResponse> futureResponse) { return super.createPageAsync(context, 
futureResponse); } } public static class ListReplayResultsFixedSizeCollection extends AbstractFixedSizeCollection< ListReplayResultsRequest, ListReplayResultsResponse, ReplayResult, ListReplayResultsPage, ListReplayResultsFixedSizeCollection> { private ListReplayResultsFixedSizeCollection( List<ListReplayResultsPage> pages, int collectionSize) { super(pages, collectionSize); } private static ListReplayResultsFixedSizeCollection createEmptyCollection() { return new ListReplayResultsFixedSizeCollection(null, 0); } @Override protected ListReplayResultsFixedSizeCollection createCollection( List<ListReplayResultsPage> pages, int collectionSize) { return new ListReplayResultsFixedSizeCollection(pages, collectionSize); } } }
apache/samza
35,301
samza-core/src/test/java/org/apache/samza/container/TestRunLoop.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.container; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.samza.Partition; import org.apache.samza.SamzaException; import org.apache.samza.checkpoint.OffsetManager; import org.apache.samza.config.RunLoopConfig; import org.apache.samza.metrics.MetricsRegistryMap; import org.apache.samza.system.IncomingMessageEnvelope; import org.apache.samza.system.SystemConsumers; import org.apache.samza.system.SystemStreamPartition; import org.apache.samza.task.ReadableCoordinator; import org.apache.samza.task.TaskCallback; import org.apache.samza.task.TaskCallbackFactory; import org.apache.samza.task.TaskCoordinator; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.mockito.InOrder; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import static org.junit.Assert.*; import static 
org.mockito.Mockito.*; public class TestRunLoop { // Immutable objects shared by all test methods. private final String runId = "foo"; private final ExecutorService executor = null; private final SamzaContainerMetrics containerMetrics = new SamzaContainerMetrics("container", new MetricsRegistryMap(), ""); private final long windowMs = -1; private final long commitMs = -1; private final long callbackTimeoutMs = 0; private final long drainCallbackTimeoutMs = 0; private final long maxThrottlingDelayMs = 0; private final long maxIdleMs = 10; private final int elasticityFactor = 1; private final boolean isHighLevelApiJob = false; private final Partition p0 = new Partition(0); private final Partition p1 = new Partition(1); private final TaskName taskName0 = new TaskName(p0.toString()); private final TaskName taskName1 = new TaskName(p1.toString()); private final SystemStreamPartition sspA0 = new SystemStreamPartition("testSystem", "testStreamA", p0); private final SystemStreamPartition sspA1 = new SystemStreamPartition("testSystem", "testStreamA", p1); private final SystemStreamPartition sspB0 = new SystemStreamPartition("testSystem", "testStreamB", p0); private final SystemStreamPartition sspB1 = new SystemStreamPartition("testSystem", "testStreamB", p1); private final IncomingMessageEnvelope envelopeA00 = new IncomingMessageEnvelope(sspA0, "0", "key0", "value0"); private final IncomingMessageEnvelope envelopeA11 = new IncomingMessageEnvelope(sspA1, "1", "key1", "value1"); private final IncomingMessageEnvelope envelopeA01 = new IncomingMessageEnvelope(sspA0, "1", "key0", "value0"); private final IncomingMessageEnvelope envelopeB00 = new IncomingMessageEnvelope(sspB0, "0", "key0", "value0"); private final IncomingMessageEnvelope envelopeB11 = new IncomingMessageEnvelope(sspB1, "1", "key1", "value1"); private final IncomingMessageEnvelope sspA0EndOfStream = IncomingMessageEnvelope.buildEndOfStreamEnvelope(sspA0); private final IncomingMessageEnvelope sspA1EndOfStream = 
IncomingMessageEnvelope.buildEndOfStreamEnvelope(sspA1); private final IncomingMessageEnvelope sspB0EndOfStream = IncomingMessageEnvelope.buildEndOfStreamEnvelope(sspB0); private final IncomingMessageEnvelope sspB1EndOfStream = IncomingMessageEnvelope.buildEndOfStreamEnvelope(sspB1); private final IncomingMessageEnvelope sspA0Drain = IncomingMessageEnvelope.buildDrainMessage(sspA0, runId); private final IncomingMessageEnvelope sspA1Drain = IncomingMessageEnvelope.buildDrainMessage(sspA1, runId); private final IncomingMessageEnvelope sspB0Drain = IncomingMessageEnvelope.buildDrainMessage(sspB0, runId); private final IncomingMessageEnvelope sspB1Drain = IncomingMessageEnvelope.buildDrainMessage(sspB1, runId); private final IncomingMessageEnvelope watermarkA0 = IncomingMessageEnvelope.buildWatermarkEnvelope(sspA0, 1L); @Rule public Timeout maxTestDurationInSeconds = Timeout.seconds(120); @Mock private RunLoopConfig mockRunLoopConfig; @Before public void init() { MockitoAnnotations.initMocks(this); when(mockRunLoopConfig.getMaxConcurrency()).thenReturn(1); when(mockRunLoopConfig.getWindowMs()).thenReturn(windowMs); when(mockRunLoopConfig.getCommitMs()).thenReturn(commitMs); when(mockRunLoopConfig.getTaskCallbackTimeoutMs()).thenReturn(callbackTimeoutMs); when(mockRunLoopConfig.getDrainCallbackTimeoutMs()).thenReturn(drainCallbackTimeoutMs); when(mockRunLoopConfig.getMaxIdleMs()).thenReturn(maxIdleMs); when(mockRunLoopConfig.getMaxThrottlingDelayMs()).thenReturn(maxThrottlingDelayMs); when(mockRunLoopConfig.asyncCommitEnabled()).thenReturn(false); when(mockRunLoopConfig.getElasticityFactor()).thenReturn(elasticityFactor); when(mockRunLoopConfig.getRunId()).thenReturn(runId); when(mockRunLoopConfig.isHighLevelApiJob()).thenReturn(false); } @Test public void testProcessMultipleTasks() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1); 
Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); tasks.put(taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA11).thenReturn(sspA0EndOfStream).thenReturn( sspA1EndOfStream).thenReturn(null); runLoop.run(); verify(task0).process(eq(envelopeA00), any(), any()); verify(task1).process(eq(envelopeA11), any(), any()); assertEquals(4L, containerMetrics.envelopes().getCount()); } @Test public void testProcessInOrder() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA01).thenReturn(sspA0EndOfStream).thenReturn(null); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); runLoop.run(); InOrder inOrder = inOrder(task0); inOrder.verify(task0).process(eq(envelopeA00), any(), any()); inOrder.verify(task0).process(eq(envelopeA01), any(), any()); } @Test public void testProcessCallbacksCompletedOutOfOrder() { int maxMessagesInFlight = 2; ExecutorService taskExecutor = Executors.newFixedThreadPool(1); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); OffsetManager offsetManager = mock(OffsetManager.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); when(task0.offsetManager()).thenReturn(offsetManager); CountDownLatch firstMessageBarrier = new CountDownLatch(1); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); taskExecutor.submit(() -> { 
firstMessageBarrier.await(); coordinator.commit(TaskCoordinator.RequestScope.CURRENT_TASK); coordinator.shutdown(TaskCoordinator.RequestScope.CURRENT_TASK); callback.complete(); return null; }); return null; }).when(task0).process(eq(envelopeA00), any(), any()); doAnswer(invocation -> { assertEquals(1, task0.metrics().messagesInFlight().getValue()); assertEquals(0, task0.metrics().asyncCallbackCompleted().getCount()); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); callback.complete(); firstMessageBarrier.countDown(); return null; }).when(task0).process(eq(envelopeA01), any(), any()); when(mockRunLoopConfig.getMaxConcurrency()).thenReturn(maxMessagesInFlight); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA01).thenReturn(null); runLoop.run(); InOrder inOrder = inOrder(task0); inOrder.verify(task0).process(eq(envelopeA00), any(), any()); inOrder.verify(task0).process(eq(envelopeA01), any(), any()); verify(offsetManager).update(eq(taskName0), eq(sspA0), eq(envelopeA00.getOffset())); assertEquals(2L, containerMetrics.processes().getCount()); assertEquals(1L, containerMetrics.containerRunning().getValue()); } @Test public void testProcessElasticityEnabled() { TaskName taskName0 = new TaskName(p0.toString() + " 0"); SystemStreamPartition ssp = new SystemStreamPartition("testSystem", "testStreamA", p0); SystemStreamPartition ssp0 = new SystemStreamPartition("testSystem", "testStreamA", p0, 0); SystemStreamPartition ssp1 = new SystemStreamPartition("testSystem", "testStreamA", p0, 1); // create two IME such that one of their ssp keybucket maps to ssp0 and the other one maps to ssp1 // task in the runloop should process only the first ime 
(aka the one whose ssp keybucket is ssp0) IncomingMessageEnvelope envelope00 = spy(new IncomingMessageEnvelope(ssp, "0", "key0", "value0")); IncomingMessageEnvelope envelope01 = spy(new IncomingMessageEnvelope(ssp, "1", "key0", "value0")); when(envelope00.getSystemStreamPartition(2)).thenReturn(ssp0); when(envelope01.getSystemStreamPartition(2)).thenReturn(ssp1); // have a single task in the run loop that processes ssp0 -> 0th keybucket of ssp RunLoopTask task0 = getMockRunLoopTask(taskName0, ssp0); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); callback.complete(); return null; }).when(task0).process(eq(envelope00), any(), any()); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); callback.complete(); return null; }).when(task0).process(eq(envelope01), any(), any()); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); when(consumerMultiplexer.choose(false)).thenReturn(envelope00).thenReturn(envelope01).thenReturn(sspA0EndOfStream).thenReturn(null); when(mockRunLoopConfig.getElasticityFactor()).thenReturn(2); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); runLoop.run(); verify(task0).process(eq(envelope00), any(), any()); verify(task0, never()).process(eq(envelope01), any(), any()); assertEquals(2, containerMetrics.envelopes().getCount()); // envelop00 and end of stream assertEquals(1, containerMetrics.processes().getCount()); // only envelope00 and not envelope01 and not end of stream } @Test public void testDrainForTasksWithSingleSSP() { TaskName taskName0 = new TaskName(p0.toString() + " 0"); TaskName taskName1 = new TaskName(p1.toString() + " 1"); RunLoopTask 
task0 = getMockRunLoopTask(taskName0, sspA0); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); // insert all envelopes followed by drain messages when(consumerMultiplexer.choose(false)) .thenReturn(envelopeA00).thenReturn(envelopeA11) .thenReturn(sspA0Drain).thenReturn(sspA1Drain); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0, taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); runLoop.run(); // check if process was called once for each task verify(task0, times(1)).process(any(), any(), any()); verify(task1, times(1)).process(any(), any(), any()); // check if drain was called once for each task followed by commit verify(task0, times(1)).drain(any()); verify(task1, times(1)).drain(any()); verify(task0, times(1)).commit(); verify(task1, times(1)).commit(); } @Test public void testDrainForTasksWithMultipleSSP() { TaskName taskName0 = new TaskName(p0.toString() + " 0"); TaskName taskName1 = new TaskName(p1.toString() + " 1"); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0, sspB0); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1, sspB1); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); // insert all envelopes followed by drain messages when(consumerMultiplexer.choose(false)) .thenReturn(envelopeA00).thenReturn(envelopeA11).thenReturn(envelopeB00).thenReturn(envelopeB11) .thenReturn(sspA0Drain).thenReturn(sspA1Drain).thenReturn(sspB0Drain).thenReturn(sspB1Drain); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0, taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); runLoop.run(); // check if process was called twice for each task verify(task0, times(2)).process(any(), any(), any()); verify(task1, times(2)).process(any(), any(), any()); // check if drain was 
called once for each task followed by commit verify(task0, times(1)).drain(any()); verify(task1, times(1)).drain(any()); verify(task0, times(1)).commit(); verify(task1, times(1)).commit(); } @Test public void testWindow() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); long windowMs = 1; RunLoopTask task = getMockRunLoopTask(taskName0, sspA0); when(task.isWindowableTask()).thenReturn(true); final AtomicInteger windowCount = new AtomicInteger(0); doAnswer(x -> { windowCount.incrementAndGet(); if (windowCount.get() == 4) { x.getArgumentAt(0, ReadableCoordinator.class).shutdown(TaskCoordinator.RequestScope.CURRENT_TASK); } return null; }).when(task).window(any()); when(mockRunLoopConfig.getWindowMs()).thenReturn(windowMs); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(null); runLoop.run(); verify(task, times(4)).window(any()); } @Test public void testCommitSingleTask() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); coordinator.commit(TaskCoordinator.RequestScope.CURRENT_TASK); coordinator.shutdown(TaskCoordinator.RequestScope.ALL_TASKS_IN_CONTAINER); callback.complete(); return null; }).when(task0).process(eq(envelopeA00), any(), any()); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(this.taskName0, task0); tasks.put(taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, 
mockRunLoopConfig); //have a null message in between to make sure task0 finishes processing and invoke the commit when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA11).thenReturn(null); runLoop.run(); verify(task0).process(any(), any(), any()); verify(task1).process(any(), any(), any()); verify(task0).commit(); verify(task1, never()).commit(); } @Test public void testCommitAllTasks() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); coordinator.commit(TaskCoordinator.RequestScope.ALL_TASKS_IN_CONTAINER); coordinator.shutdown(TaskCoordinator.RequestScope.ALL_TASKS_IN_CONTAINER); callback.complete(); return null; }).when(task0).process(eq(envelopeA00), any(), any()); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(this.taskName0, task0); tasks.put(taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); //have a null message in between to make sure task0 finishes processing and invoke the commit when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA11).thenReturn(null); runLoop.run(); verify(task0).process(any(), any(), any()); verify(task1).process(any(), any(), any()); verify(task0).commit(); verify(task1).commit(); } @Test public void testShutdownOnConsensus() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory 
callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); coordinator.shutdown(TaskCoordinator.RequestScope.CURRENT_TASK); callback.complete(); return null; }).when(task0).process(eq(envelopeA00), any(), any()); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); coordinator.shutdown(TaskCoordinator.RequestScope.CURRENT_TASK); callback.complete(); return null; }).when(task1).process(eq(envelopeA11), any(), any()); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); tasks.put(taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); // consensus is reached after envelope1 is processed. 
when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA11).thenReturn(null); runLoop.run(); verify(task0).process(any(), any(), any()); verify(task1).process(any(), any(), any()); assertEquals(2L, containerMetrics.envelopes().getCount()); assertEquals(2L, containerMetrics.processes().getCount()); } @Test public void testEndOfStreamWithMultipleTasks() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0, sspB0); RunLoopTask task1 = getMockRunLoopTask(taskName1, sspA1, sspB1); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); tasks.put(taskName1, task1); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)) .thenReturn(envelopeA00) .thenReturn(envelopeA11) .thenReturn(envelopeB00) .thenReturn(envelopeB11) .thenReturn(sspA0EndOfStream) .thenReturn(sspB0EndOfStream) .thenReturn(sspB1EndOfStream) .thenReturn(sspA1EndOfStream) .thenReturn(null); runLoop.run(); verify(task0).process(eq(envelopeA00), any(), any()); verify(task0).process(eq(envelopeB00), any(), any()); verify(task0).endOfStream(any()); verify(task1).process(eq(envelopeA11), any(), any()); verify(task1).process(eq(envelopeB11), any(), any()); verify(task1).endOfStream(any()); assertEquals(8L, containerMetrics.envelopes().getCount()); } @Test public void testEndOfStreamWaitsForInFlightMessages() { int maxMessagesInFlight = 2; ExecutorService taskExecutor = Executors.newFixedThreadPool(1); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); OffsetManager offsetManager = mock(OffsetManager.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); when(task0.offsetManager()).thenReturn(offsetManager); CountDownLatch firstMessageBarrier = new CountDownLatch(2); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, 
TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); taskExecutor.submit(() -> { firstMessageBarrier.await(); callback.complete(); return null; }); return null; }).when(task0).process(eq(envelopeA00), any(), any()); doAnswer(invocation -> { assertEquals(1, task0.metrics().messagesInFlight().getValue()); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); callback.complete(); firstMessageBarrier.countDown(); return null; }).when(task0).process(eq(envelopeA01), any(), any()); doAnswer(invocation -> { assertEquals(0, task0.metrics().messagesInFlight().getValue()); assertEquals(2, task0.metrics().asyncCallbackCompleted().getCount()); return null; }).when(task0).endOfStream(any()); when(mockRunLoopConfig.getMaxConcurrency()).thenReturn(maxMessagesInFlight); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA01).thenReturn(sspA0EndOfStream) .thenAnswer(invocation -> { // this ensures that the end of stream message has passed through run loop BEFORE the last remaining in flight message completes firstMessageBarrier.countDown(); return null; }); runLoop.run(); verify(task0).endOfStream(any()); } @Test public void testDrainWaitsForInFlightMessages() { int maxMessagesInFlight = 2; ExecutorService taskExecutor = Executors.newFixedThreadPool(1); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); OffsetManager offsetManager = mock(OffsetManager.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); when(task0.offsetManager()).thenReturn(offsetManager); CountDownLatch firstMessageBarrier = new CountDownLatch(2); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = 
invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); taskExecutor.submit(() -> { firstMessageBarrier.await(); callback.complete(); return null; }); return null; }).when(task0).process(eq(envelopeA00), any(), any()); doAnswer(invocation -> { assertEquals(1, task0.metrics().messagesInFlight().getValue()); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); callback.complete(); firstMessageBarrier.countDown(); return null; }).when(task0).process(eq(envelopeA01), any(), any()); doAnswer(invocation -> { assertEquals(0, task0.metrics().messagesInFlight().getValue()); assertEquals(2, task0.metrics().asyncCallbackCompleted().getCount()); return null; }).when(task0).drain(any()); when(mockRunLoopConfig.getMaxConcurrency()).thenReturn(maxMessagesInFlight); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA01).thenReturn(sspA0Drain) .thenAnswer(invocation -> { // this ensures that the drain message has passed through run loop BEFORE the flight message // completes firstMessageBarrier.countDown(); return null; }); runLoop.run(); verify(task0).drain(any()); } @Test public void testEndOfStreamCommitBehavior() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(0, ReadableCoordinator.class); coordinator.commit(TaskCoordinator.RequestScope.CURRENT_TASK); return null; }).when(task0).endOfStream(any()); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, 
consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(sspA0EndOfStream).thenReturn(null); runLoop.run(); InOrder inOrder = inOrder(task0); inOrder.verify(task0).endOfStream(any()); inOrder.verify(task0).commit(); } @Test public void testDrainCommitBehavior() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(sspA0Drain).thenReturn(null); runLoop.run(); InOrder inOrder = inOrder(task0); inOrder.verify(task0).drain(any()); inOrder.verify(task0).commit(); } @Test public void testCommitWithMessageInFlightWhenAsyncCommitIsEnabled() { int maxMessagesInFlight = 2; ExecutorService taskExecutor = Executors.newFixedThreadPool(2); SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); OffsetManager offsetManager = mock(OffsetManager.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); when(task0.offsetManager()).thenReturn(offsetManager); CountDownLatch firstMessageBarrier = new CountDownLatch(1); doAnswer(invocation -> { ReadableCoordinator coordinator = invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); taskExecutor.submit(() -> { firstMessageBarrier.await(); coordinator.commit(TaskCoordinator.RequestScope.CURRENT_TASK); callback.complete(); return null; }); return null; }).when(task0).process(eq(envelopeA00), any(), any()); CountDownLatch secondMessageBarrier = new CountDownLatch(1); doAnswer(invocation -> { ReadableCoordinator coordinator = 
invocation.getArgumentAt(1, ReadableCoordinator.class); TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); taskExecutor.submit(() -> { // let the first message proceed to ask for a commit firstMessageBarrier.countDown(); // block this message until commit is executed secondMessageBarrier.await(); coordinator.shutdown(TaskCoordinator.RequestScope.CURRENT_TASK); callback.complete(); return null; }); return null; }).when(task0).process(eq(envelopeA01), any(), any()); doAnswer(invocation -> { assertEquals(1, task0.metrics().asyncCallbackCompleted().getCount()); assertEquals(1, task0.metrics().messagesInFlight().getValue()); secondMessageBarrier.countDown(); return null; }).when(task0).commit(); when(mockRunLoopConfig.getMaxConcurrency()).thenReturn(maxMessagesInFlight); when(mockRunLoopConfig.asyncCommitEnabled()).thenReturn(true); Map<TaskName, RunLoopTask> tasks = new HashMap<>(); tasks.put(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)).thenReturn(envelopeA00).thenReturn(envelopeA01).thenReturn(null); runLoop.run(); InOrder inOrder = inOrder(task0); inOrder.verify(task0).process(eq(envelopeA00), any(), any()); inOrder.verify(task0).process(eq(envelopeA01), any(), any()); inOrder.verify(task0).commit(); } @Test(expected = SamzaException.class) public void testExceptionIsPropagated() { SystemConsumers consumerMultiplexer = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); callbackFactory.createCallback().failure(new Exception("Intentional failure")); return null; }).when(task0).process(eq(envelopeA00), any(), any()); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0); 
RunLoop runLoop = new RunLoop(tasks, executor, consumerMultiplexer, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumerMultiplexer.choose(false)) .thenReturn(envelopeA00) .thenReturn(sspA0EndOfStream) .thenReturn(null); runLoop.run(); } @Test public void testWatermarkCallbackTimeout() throws InterruptedException { final CountDownLatch watermarkProcessLatch = new CountDownLatch(1); when(mockRunLoopConfig.getTaskCallbackTimeoutMs()).thenReturn(5L); when(mockRunLoopConfig.getWatermarkCallbackTimeoutMs()).thenReturn(15L); SystemConsumers consumers = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); Thread.sleep(10); callback.complete(); return null; }).when(task0).process(eq(watermarkA0), any(), any()); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); callbackFactory.createCallback().complete(); return null; }).when(task0).process(eq(envelopeA00), any(), any()); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); watermarkProcessLatch.countDown(); callbackFactory.createCallback().complete(); return null; }).when(task0).process(eq(envelopeA01), any(), any()); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumers, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumers.choose(false)) .thenReturn(envelopeA00) .thenReturn(watermarkA0) .thenReturn(envelopeA01) .thenReturn(sspA0EndOfStream) .thenReturn(null); runLoop.run(); assertTrue(watermarkProcessLatch.await(15L, TimeUnit.MILLISECONDS)); } @Test public void testWatermarkCallbackTimeoutThrowsException() { when(mockRunLoopConfig.getTaskCallbackTimeoutMs()).thenReturn(10L); 
when(mockRunLoopConfig.getWatermarkCallbackTimeoutMs()).thenReturn(1L); SystemConsumers consumers = mock(SystemConsumers.class); RunLoopTask task0 = getMockRunLoopTask(taskName0, sspA0); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); TaskCallback callback = callbackFactory.createCallback(); Thread.sleep(5); callback.complete(); return null; }).when(task0).process(eq(watermarkA0), any(), any()); doAnswer(invocation -> { TaskCallbackFactory callbackFactory = invocation.getArgumentAt(2, TaskCallbackFactory.class); callbackFactory.createCallback().complete(); return null; }).when(task0).process(eq(envelopeA00), any(), any()); Map<TaskName, RunLoopTask> tasks = ImmutableMap.of(taskName0, task0); RunLoop runLoop = new RunLoop(tasks, executor, consumers, containerMetrics, () -> 0L, mockRunLoopConfig); when(consumers.choose(false)) .thenReturn(envelopeA00) .thenReturn(watermarkA0) .thenReturn(null); try { runLoop.run(); fail("Watermark callback should have timed out and failed run loop"); } catch (SamzaException e) { } } private RunLoopTask getMockRunLoopTask(TaskName taskName, SystemStreamPartition ... ssps) { RunLoopTask task0 = mock(RunLoopTask.class); when(task0.systemStreamPartitions()).thenReturn(new HashSet<>(Arrays.asList(ssps))); when(task0.metrics()).thenReturn(new TaskInstanceMetrics("test", new MetricsRegistryMap(), "")); when(task0.taskName()).thenReturn(taskName); return task0; } }
apache/logging-log4j2
35,167
log4j-1.2-api/src/main/java/org/apache/log4j/xml/XmlConfiguration.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.log4j.xml; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.IOException; import java.io.InterruptedIOException; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.IntStream; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.FactoryConfigurationError; import org.apache.log4j.Appender; import org.apache.log4j.Layout; import org.apache.log4j.Level; import org.apache.log4j.bridge.AppenderAdapter; import org.apache.log4j.bridge.FilterAdapter; import org.apache.log4j.config.Log4j1Configuration; import org.apache.log4j.config.PropertySetter; import org.apache.log4j.helpers.OptionConverter; import org.apache.log4j.rewrite.RewritePolicy; import org.apache.log4j.spi.AppenderAttachable; import org.apache.log4j.spi.ErrorHandler; import org.apache.log4j.spi.Filter; import org.apache.logging.log4j.core.Filter.Result; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.rolling.TriggeringPolicy; import 
org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.ConfigurationSource; import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.logging.log4j.core.config.status.StatusConfiguration; import org.apache.logging.log4j.core.filter.ThresholdFilter; import org.apache.logging.log4j.status.StatusLogger; import org.apache.logging.log4j.util.LoaderUtil; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * Class Description goes here. */ public class XmlConfiguration extends Log4j1Configuration { private static final org.apache.logging.log4j.Logger LOGGER = StatusLogger.getLogger(); private static final String CONFIGURATION_TAG = "log4j:configuration"; private static final String OLD_CONFIGURATION_TAG = "configuration"; private static final String RENDERER_TAG = "renderer"; private static final String APPENDER_TAG = "appender"; public static final String PARAM_TAG = "param"; public static final String LAYOUT_TAG = "layout"; private static final String CATEGORY = "category"; private static final String LOGGER_ELEMENT = "logger"; private static final String CATEGORY_FACTORY_TAG = "categoryFactory"; private static final String LOGGER_FACTORY_TAG = "loggerFactory"; public static final String NAME_ATTR = "name"; private static final String CLASS_ATTR = "class"; public static final String VALUE_ATTR = "value"; private static final String ROOT_TAG = "root"; private static final String LEVEL_TAG = "level"; private static final String PRIORITY_TAG = "priority"; public static final String FILTER_TAG = "filter"; private static final String ERROR_HANDLER_TAG = "errorHandler"; public static final String REF_ATTR = "ref"; private static final String ADDITIVITY_ATTR = "additivity"; private static final String CONFIG_DEBUG_ATTR = 
"configDebug"; private static final String INTERNAL_DEBUG_ATTR = "debug"; private static final String THRESHOLD_ATTR = "threshold"; private static final String EMPTY_STR = ""; private static final String dbfKey = "javax.xml.parsers.DocumentBuilderFactory"; private static final String THROWABLE_RENDERER_TAG = "throwableRenderer"; public static final long DEFAULT_DELAY = 60000; /** * File name prefix for test configurations. */ protected static final String TEST_PREFIX = "log4j-test"; /** * File name prefix for standard configurations. */ protected static final String DEFAULT_PREFIX = "log4j"; // key: appenderName, value: appender private final Map<String, Appender> appenderMap; private final Properties props = null; public XmlConfiguration( final LoggerContext loggerContext, final ConfigurationSource source, final int monitorIntervalSeconds) { super(loggerContext, source, monitorIntervalSeconds); appenderMap = new HashMap<>(); } public void addAppenderIfAbsent(Appender appender) { appenderMap.putIfAbsent(appender.getName(), appender); } /** * Configures log4j by reading in a log4j.dtd compliant XML * configuration file. */ @Override public void doConfigure() throws FactoryConfigurationError { final ConfigurationSource source = getConfigurationSource(); final ParseAction action = new ParseAction() { @Override @SuppressFBWarnings( value = "XXE_DOCUMENT", justification = "The `DocumentBuilder` is configured to not resolve external entities.") public Document parse(final DocumentBuilder parser) throws SAXException, IOException { @SuppressWarnings("resource") final // The ConfigurationSource and its caller manages the InputStream. 
InputSource inputSource = new InputSource(source.getInputStream()); inputSource.setSystemId("dummy://log4j.dtd"); return parser.parse(inputSource); } @Override public String toString() { return getConfigurationSource().getLocation(); } }; doConfigure(action); } private void doConfigure(final ParseAction action) throws FactoryConfigurationError { DocumentBuilderFactory dbf; try { LOGGER.debug("System property is : {}", OptionConverter.getSystemProperty(dbfKey, null)); dbf = DocumentBuilderFactory.newInstance(); LOGGER.debug("Standard DocumentBuilderFactory search succeeded."); LOGGER.debug("DocumentBuilderFactory is: " + dbf.getClass().getName()); } catch (FactoryConfigurationError fce) { final Exception e = fce.getException(); LOGGER.debug("Could not instantiate a DocumentBuilderFactory.", e); throw fce; } try { dbf.setValidating(true); final DocumentBuilder docBuilder = dbf.newDocumentBuilder(); docBuilder.setErrorHandler(new SAXErrorHandler()); docBuilder.setEntityResolver(new Log4jEntityResolver()); final Document doc = action.parse(docBuilder); parse(doc.getDocumentElement()); } catch (Exception e) { if (e instanceof InterruptedException || e instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } // I know this is miserable... LOGGER.error("Could not parse " + action.toString() + ".", e); } } @Override public Configuration reconfigure() { try { final ConfigurationSource source = getConfigurationSource().resetInputStream(); if (source == null) { return null; } final XmlConfigurationFactory factory = new XmlConfigurationFactory(); final XmlConfiguration config = (XmlConfiguration) factory.getConfiguration(getLoggerContext(), source); return config == null || config.getState() != State.INITIALIZING ? null : config; } catch (final IOException ex) { LOGGER.error("Cannot locate file {}: {}", getConfigurationSource(), ex); } return null; } /** * Delegates unrecognized content to created instance if it supports UnrecognizedElementParser. 
* * @param instance instance, may be null. * @param element element, may not be null. * @param props properties * @throws IOException thrown if configuration of owner object should be abandoned. */ private void parseUnrecognizedElement(final Object instance, final Element element, final Properties props) throws Exception { boolean recognized = false; if (instance instanceof UnrecognizedElementHandler) { recognized = ((UnrecognizedElementHandler) instance).parseUnrecognizedElement(element, props); } if (!recognized) { LOGGER.warn("Unrecognized element {}", element.getNodeName()); } } /** * Delegates unrecognized content to created instance if * it supports UnrecognizedElementParser and catches and * logs any exception. * * @param instance instance, may be null. * @param element element, may not be null. * @param props properties * @since 1.2.15 */ private void quietParseUnrecognizedElement(final Object instance, final Element element, final Properties props) { try { parseUnrecognizedElement(instance, element, props); } catch (Exception ex) { if (ex instanceof InterruptedException || ex instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Error in extension content: ", ex); } } /** * Substitutes property value for any references in expression. * * @param value value from configuration file, may contain * literal text, property references or both * @param props properties. * @return evaluated expression, may still contain expressions * if unable to expand. */ public String subst(final String value, final Properties props) { try { return OptionConverter.substVars(value, props); } catch (IllegalArgumentException e) { LOGGER.warn("Could not perform variable substitution.", e); return value; } } /** * Sets a parameter based from configuration file content. * * @param elem param element, may not be null. * @param propSetter property setter, may not be null. 
* @param props properties * @since 1.2.15 */ public void setParameter(final Element elem, final PropertySetter propSetter, final Properties props) { final String name = subst(elem.getAttribute("name"), props); String value = (elem.getAttribute("value")); value = subst(OptionConverter.convertSpecialChars(value), props); propSetter.setProperty(name, value); } /** * Creates an object and processes any nested param elements * but does not call activateOptions. If the class also supports * UnrecognizedElementParser, the parseUnrecognizedElement method * will be call for any child elements other than param. * * @param element element, may not be null. * @param props properties * @param expectedClass interface or class expected to be implemented * by created class * @return created class or null. * @throws Exception thrown if the contain object should be abandoned. * @since 1.2.15 */ public Object parseElement( final Element element, final Properties props, @SuppressWarnings("rawtypes") final Class expectedClass) throws Exception { final String clazz = subst(element.getAttribute("class"), props); final Object instance = OptionConverter.instantiateByClassName(clazz, expectedClass, null); if (instance != null) { final PropertySetter propSetter = new PropertySetter(instance); final NodeList children = element.getChildNodes(); final int length = children.getLength(); for (int loop = 0; loop < length; loop++) { final Node currentNode = children.item(loop); if (currentNode.getNodeType() == Node.ELEMENT_NODE) { final Element currentElement = (Element) currentNode; final String tagName = currentElement.getTagName(); if (tagName.equals("param")) { setParameter(currentElement, propSetter, props); } else { parseUnrecognizedElement(instance, currentElement, props); } } } return instance; } return null; } /** * Used internally to parse appenders by IDREF name. 
*/ private Appender findAppenderByName(final Document doc, final String appenderName) { Appender appender = appenderMap.get(appenderName); if (appender != null) { return appender; } // Endre's hack: Element element = null; final NodeList list = doc.getElementsByTagName("appender"); for (int t = 0; t < list.getLength(); t++) { final Node node = list.item(t); final NamedNodeMap map = node.getAttributes(); final Node attrNode = map.getNamedItem("name"); if (appenderName.equals(attrNode.getNodeValue())) { element = (Element) node; break; } } // Hack finished. if (element == null) { LOGGER.error("No appender named [{}] could be found.", appenderName); return null; } appender = parseAppender(element); if (appender != null) { appenderMap.put(appenderName, appender); } return appender; } /** * Used internally to parse appenders by IDREF element. * @param appenderRef The Appender Reference Element. * @return The Appender. */ public Appender findAppenderByReference(final Element appenderRef) { final String appenderName = subst(appenderRef.getAttribute(REF_ATTR)); final Document doc = appenderRef.getOwnerDocument(); return findAppenderByName(doc, appenderName); } /** * Used internally to parse an appender element. * @param appenderElement The Appender Element. * @return The Appender. 
*/ public Appender parseAppender(final Element appenderElement) { final String className = subst(appenderElement.getAttribute(CLASS_ATTR)); LOGGER.debug("Class name: [" + className + ']'); Appender appender = manager.parseAppender(className, appenderElement, this); if (appender == null) { appender = buildAppender(className, appenderElement); } return appender; } private Appender buildAppender(final String className, final Element appenderElement) { try { final Appender appender = LoaderUtil.newInstanceOf(className); final PropertySetter propSetter = new PropertySetter(appender); appender.setName(subst(appenderElement.getAttribute(NAME_ATTR))); final AtomicReference<Filter> filterChain = new AtomicReference<>(); forEachElement(appenderElement.getChildNodes(), currentElement -> { // Parse appender parameters switch (currentElement.getTagName()) { case PARAM_TAG: setParameter(currentElement, propSetter); break; case LAYOUT_TAG: appender.setLayout(parseLayout(currentElement)); break; case FILTER_TAG: addFilter(filterChain, currentElement); break; case ERROR_HANDLER_TAG: parseErrorHandler(currentElement, appender); break; case APPENDER_REF_TAG: final String refName = subst(currentElement.getAttribute(REF_ATTR)); if (appender instanceof AppenderAttachable) { final AppenderAttachable aa = (AppenderAttachable) appender; final Appender child = findAppenderByReference(currentElement); LOGGER.debug( "Attaching appender named [{}] to appender named [{}].", refName, appender.getName()); aa.addAppender(child); } else { LOGGER.error( "Requesting attachment of appender named [{}] to appender named [{}]" + "which does not implement org.apache.log4j.spi.AppenderAttachable.", refName, appender.getName()); } break; default: try { parseUnrecognizedElement(appender, currentElement, props); } catch (Exception ex) { throw new ConsumerException(ex); } } }); final Filter head = filterChain.get(); if (head != null) { appender.addFilter(head); } propSetter.activate(); return appender; } catch 
(ConsumerException ex) { final Throwable t = ex.getCause(); if (t instanceof InterruptedException || t instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an Appender. Reported error follows.", t); } catch (Exception oops) { if (oops instanceof InterruptedException || oops instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an Appender. Reported error follows.", oops); } return null; } public RewritePolicy parseRewritePolicy(final Element rewritePolicyElement) { final String className = subst(rewritePolicyElement.getAttribute(CLASS_ATTR)); LOGGER.debug("Class name: [" + className + ']'); RewritePolicy policy = manager.parseRewritePolicy(className, rewritePolicyElement, this); if (policy == null) { policy = buildRewritePolicy(className, rewritePolicyElement); } return policy; } private RewritePolicy buildRewritePolicy(String className, Element element) { try { final RewritePolicy policy = LoaderUtil.newInstanceOf(className); final PropertySetter propSetter = new PropertySetter(policy); forEachElement(element.getChildNodes(), currentElement -> { if (currentElement.getTagName().equalsIgnoreCase(PARAM_TAG)) { setParameter(currentElement, propSetter); } }); propSetter.activate(); return policy; } catch (ConsumerException ex) { final Throwable t = ex.getCause(); if (t instanceof InterruptedException || t instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an RewritePolicy. Reported error follows.", t); } catch (Exception oops) { if (oops instanceof InterruptedException || oops instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an RewritePolicy. Reported error follows.", oops); } return null; } /** * Used internally to parse an {@link ErrorHandler} element. 
*/ private void parseErrorHandler(Element element, Appender appender) { final ErrorHandler eh = (ErrorHandler) OptionConverter.instantiateByClassName( subst(element.getAttribute(CLASS_ATTR)), ErrorHandler.class, null); if (eh != null) { eh.setAppender(appender); final PropertySetter propSetter = new PropertySetter(eh); forEachElement(element.getChildNodes(), currentElement -> { final String tagName = currentElement.getTagName(); if (tagName.equals(PARAM_TAG)) { setParameter(currentElement, propSetter); } }); propSetter.activate(); appender.setErrorHandler(eh); } } /** * Used internally to parse a filter element. * @param filterElement The Filter Element. */ public void addFilter(final AtomicReference<Filter> ref, final Element filterElement) { final Filter value = parseFilters(filterElement); ref.accumulateAndGet(value, FilterAdapter::addFilter); } /** * Used internally to parse a filter element. */ public Filter parseFilters(final Element filterElement) { final String className = subst(filterElement.getAttribute(CLASS_ATTR)); LOGGER.debug("Class name: [" + className + ']'); Filter filter = manager.parseFilter(className, filterElement, this); if (filter == null) { filter = buildFilter(className, filterElement); } return filter; } private Filter buildFilter(final String className, final Element filterElement) { try { final Filter filter = LoaderUtil.newInstanceOf(className); final PropertySetter propSetter = new PropertySetter(filter); forEachElement(filterElement.getChildNodes(), currentElement -> { // Parse appender parameters switch (currentElement.getTagName()) { case PARAM_TAG: setParameter(currentElement, propSetter); break; } }); propSetter.activate(); return filter; } catch (ConsumerException ex) { final Throwable t = ex.getCause(); if (t instanceof InterruptedException || t instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an Filter. 
Reported error follows.", t); } catch (Exception oops) { if (oops instanceof InterruptedException || oops instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create an Filter. Reported error follows.", oops); } return null; } /** * Used internally to parse an category element. */ private void parseCategory(final Element loggerElement) { // Create a new org.apache.log4j.Category object from the <category> element. final String catName = subst(loggerElement.getAttribute(NAME_ATTR)); final boolean additivity = OptionConverter.toBoolean(subst(loggerElement.getAttribute(ADDITIVITY_ATTR)), true); LoggerConfig loggerConfig = getLogger(catName); if (loggerConfig == null) { loggerConfig = new LoggerConfig(catName, org.apache.logging.log4j.Level.ERROR, additivity); addLogger(catName, loggerConfig); } else { loggerConfig.setAdditive(additivity); } parseChildrenOfLoggerElement(loggerElement, loggerConfig, false); } /** * Used internally to parse the root category element. */ private void parseRoot(final Element rootElement) { final LoggerConfig root = getRootLogger(); parseChildrenOfLoggerElement(rootElement, root, true); } /** * Used internally to parse the children of a LoggerConfig element. 
*/ private void parseChildrenOfLoggerElement(Element catElement, LoggerConfig loggerConfig, boolean isRoot) { final PropertySetter propSetter = new PropertySetter(loggerConfig); loggerConfig.getAppenderRefs().clear(); forEachElement(catElement.getChildNodes(), currentElement -> { switch (currentElement.getTagName()) { case APPENDER_REF_TAG: { final Appender appender = findAppenderByReference(currentElement); final String refName = subst(currentElement.getAttribute(REF_ATTR)); if (appender != null) { LOGGER.debug( "Adding appender named [{}] to loggerConfig [{}].", refName, loggerConfig.getName()); loggerConfig.addAppender(getAppender(refName), null, null); } else { LOGGER.debug("Appender named [{}] not found.", refName); } break; } case LEVEL_TAG: case PRIORITY_TAG: { parseLevel(currentElement, loggerConfig, isRoot); break; } case PARAM_TAG: { setParameter(currentElement, propSetter); break; } default: { quietParseUnrecognizedElement(loggerConfig, currentElement, props); } } }); propSetter.activate(); } /** * Used internally to parse a layout element. * @param layoutElement The Layout Element. * @return The Layout. 
*/ public Layout parseLayout(final Element layoutElement) { final String className = subst(layoutElement.getAttribute(CLASS_ATTR)); LOGGER.debug("Parsing layout of class: \"{}\"", className); Layout layout = manager.parseLayout(className, layoutElement, this); if (layout == null) { layout = buildLayout(className, layoutElement); } return layout; } private Layout buildLayout(final String className, final Element layout_element) { try { final Layout layout = LoaderUtil.newInstanceOf(className); final PropertySetter propSetter = new PropertySetter(layout); forEachElement(layout_element.getChildNodes(), currentElement -> { final String tagName = currentElement.getTagName(); if (tagName.equals(PARAM_TAG)) { setParameter(currentElement, propSetter); } else { try { parseUnrecognizedElement(layout, currentElement, props); } catch (Exception ex) { throw new ConsumerException(ex); } } }); propSetter.activate(); return layout; } catch (Exception e) { final Throwable cause = e.getCause(); if (e instanceof InterruptedException || e instanceof InterruptedIOException || cause instanceof InterruptedException || cause instanceof InterruptedIOException) { Thread.currentThread().interrupt(); } LOGGER.error("Could not create the Layout. Reported error follows.", e); } return null; } public TriggeringPolicy parseTriggeringPolicy(final Element policyElement) { final String className = subst(policyElement.getAttribute(CLASS_ATTR)); LOGGER.debug("Parsing triggering policy of class: \"{}\"", className); return manager.parseTriggeringPolicy(className, policyElement, this); } /** * Used internally to parse a level element. 
*/
    /**
     * Applies the {@code value}/{@code class} level attributes of a {@code <level>} (or
     * {@code <priority>}) element to the given logger configuration.
     *
     * <p>The special values {@code "inherited"} and {@code "null"} clear the logger's own level so
     * it inherits from its parent; the root logger cannot inherit, so that directive is rejected
     * with an error. When a custom level {@code class} attribute is present, it is passed through
     * to {@link OptionConverter#toLevel} for resolution.
     *
     * @param element the {@code <level>}/{@code <priority>} DOM element being processed
     * @param logger  the logger configuration to update
     * @param isRoot  true when configuring the root logger (affects naming and inheritance rules)
     */
    private void parseLevel(Element element, LoggerConfig logger, boolean isRoot) {
        String catName = logger.getName();
        if (isRoot) {
            catName = "root";
        }
        final String priStr = subst(element.getAttribute(VALUE_ATTR));
        LOGGER.debug("Level value for {} is [{}].", catName, priStr);

        if (INHERITED.equalsIgnoreCase(priStr) || NULL.equalsIgnoreCase(priStr)) {
            if (isRoot) {
                LOGGER.error("Root level cannot be inherited. Ignoring directive.");
            } else {
                // null level means "inherit from parent" in the Log4j logger hierarchy.
                logger.setLevel(null);
            }
        } else {
            final String className = subst(element.getAttribute(CLASS_ATTR));
            final Level level;
            if (EMPTY_STR.equals(className)) {
                level = OptionConverter.toLevel(priStr, DEFAULT_LEVEL);
            } else {
                // A custom Log4j 1.x Level subclass was named; let OptionConverter load it.
                level = OptionConverter.toLevel(className, priStr, DEFAULT_LEVEL);
            }
            // Bridge the Log4j 1.x Level to its Log4j 2 equivalent before applying it.
            logger.setLevel(level != null ? level.getVersion2Level() : null);
        }
        LOGGER.debug("{} level set to {}", catName, logger.getLevel());
    }

    /**
     * Reads a {@code <param name="..." value="..."/>} element and applies it to the target bean
     * via the given {@link PropertySetter}. Both name and value go through property substitution,
     * and the value additionally through Log4j 1.x special-character conversion.
     *
     * @param element    the {@code <param>} DOM element
     * @param propSetter setter bound to the object receiving the property
     */
    private void setParameter(Element element, PropertySetter propSetter) {
        final String name = subst(element.getAttribute(NAME_ATTR));
        String value = element.getAttribute(VALUE_ATTR);
        value = subst(OptionConverter.convertSpecialChars(value));
        propSetter.setProperty(name, value);
    }

    /**
     * Used internally to configure the log4j framework by parsing a DOM
     * tree of XML elements based on <a
     * href="doc-files/log4j.dtd">log4j.dtd</a>.
     *
     * <p>Handles both the current {@code <log4j:configuration>} root element and the deprecated
     * legacy root element, configures the internal status logger from the {@code debug}/
     * {@code configDebug} attributes, installs an optional global threshold filter, and then
     * dispatches each recognized child element to the matching {@code parse*} helper. Log4j 1
     * features with no Log4j 2 equivalent (renderers, logger factories) are warned about and
     * skipped; unrecognized elements are quietly parsed for side effects.
     *
     * @param element the root DOM element of the configuration document
     */
    private void parse(Element element) {
        final String rootElementName = element.getTagName();

        if (!rootElementName.equals(CONFIGURATION_TAG)) {
            if (rootElementName.equals(OLD_CONFIGURATION_TAG)) {
                // Legacy root element: accept it but nag the user to migrate.
                LOGGER.warn("The <" + OLD_CONFIGURATION_TAG + "> element has been deprecated.");
                LOGGER.warn("Use the <" + CONFIGURATION_TAG + "> element instead.");
            } else {
                LOGGER.error("DOM element is - not a <" + CONFIGURATION_TAG + "> element.");
                return;
            }
        }

        final String debugAttrib = subst(element.getAttribute(INTERNAL_DEBUG_ATTR));

        LOGGER.debug("debug attribute= \"" + debugAttrib + "\".");
        // if the log4j.dtd is not specified in the XML file, then the
        // "debug" attribute is returned as the empty string.
        String status = "error";
        if (!debugAttrib.isEmpty() && !debugAttrib.equals("null")) {
            status = OptionConverter.toBoolean(debugAttrib, true) ? "debug" : "error";
        } else {
            LOGGER.debug("Ignoring " + INTERNAL_DEBUG_ATTR + " attribute.");
        }

        // "configDebug" is an even older spelling of the same switch; it overrides "debug".
        final String confDebug = subst(element.getAttribute(CONFIG_DEBUG_ATTR));
        if (!confDebug.isEmpty() && !confDebug.equals("null")) {
            LOGGER.warn("The \"" + CONFIG_DEBUG_ATTR + "\" attribute is deprecated.");
            LOGGER.warn("Use the \"" + INTERNAL_DEBUG_ATTR + "\" attribute instead.");
            status = OptionConverter.toBoolean(confDebug, true) ? "debug" : "error";
        }

        final StatusConfiguration statusConfig = new StatusConfiguration().withStatus(status);
        statusConfig.initialize();

        final String threshold = subst(element.getAttribute(THRESHOLD_ATTR));
        // NOTE(review): DOM getAttribute() returns "" (never null) for a missing attribute, so
        // this null check looks always-true and an empty threshold still installs a filter at
        // Level.ALL — presumably harmless, but confirm against upstream intent.
        if (threshold != null) {
            final org.apache.logging.log4j.Level level =
                    OptionConverter.convertLevel(threshold.trim(), org.apache.logging.log4j.Level.ALL);
            addFilter(ThresholdFilter.createFilter(level, Result.NEUTRAL, Result.DENY));
        }

        forEachElement(element.getChildNodes(), currentElement -> {
            switch (currentElement.getTagName()) {
                case CATEGORY:
                case LOGGER_ELEMENT:
                    parseCategory(currentElement);
                    break;
                case ROOT_TAG:
                    parseRoot(currentElement);
                    break;
                case RENDERER_TAG:
                    LOGGER.warn("Log4j 1 renderers are not supported by Log4j 2 and will be ignored.");
                    break;
                case THROWABLE_RENDERER_TAG:
                    LOGGER.warn("Log4j 1 throwable renderers are not supported by Log4j 2 and will be ignored.");
                    break;
                case CATEGORY_FACTORY_TAG:
                case LOGGER_FACTORY_TAG:
                    LOGGER.warn("Log4j 1 logger factories are not supported by Log4j 2 and will be ignored.");
                    break;
                case APPENDER_TAG:
                    // Build the Log4j 1 appender, remember it by name for later appender-refs,
                    // then register a Log4j 2 adapter for it with this configuration.
                    final Appender appender = parseAppender(currentElement);
                    appenderMap.put(appender.getName(), appender);
                    addAppender(AppenderAdapter.adapt(appender));
                    break;
                default:
                    quietParseUnrecognizedElement(null, currentElement, props);
            }
        });
    }

    /**
     * Performs property substitution ({@code ${...}} expansion) on the given value using this
     * configuration's string substitutor.
     *
     * @param value raw attribute text, possibly containing substitution placeholders
     * @return the substituted text
     */
    private String subst(final String value) {
        return getStrSubstitutor().replace(value);
    }

    /**
     * Invokes {@code consumer} for every child of {@code list} that is a DOM {@link Element},
     * skipping text, comment, and other non-element nodes.
     *
     * @param list     the node list to iterate (e.g. {@code element.getChildNodes()})
     * @param consumer callback applied to each element node in document order
     */
    public static void forEachElement(final NodeList list, final Consumer<Element> consumer) {
        IntStream.range(0, list.getLength())
                .mapToObj(list::item)
                .filter(node -> node.getNodeType() == Node.ELEMENT_NODE)
                .forEach(node -> consumer.accept((Element) node));
    }

    /**
     * Strategy for producing a DOM {@link Document} from a configured {@link DocumentBuilder};
     * lets callers vary the input source while sharing parser setup.
     */
    private interface ParseAction {
        Document parse(final DocumentBuilder parser) throws SAXException, IOException;
    }

    /**
     * SAX error handler that routes parser warnings and errors to the status logger instead of
     * aborting configuration.
     */
    private static class SAXErrorHandler implements org.xml.sax.ErrorHandler {
        private static final org.apache.logging.log4j.Logger LOGGER = StatusLogger.getLogger();

        @Override
        public void error(final SAXParseException ex) {
            emitMessage("Continuable parsing error ", ex);
        }

        @Override
        public void fatalError(final SAXParseException ex) {
            emitMessage("Fatal parsing error ", ex);
        }

        @Override
        public void warning(final SAXParseException ex) {
            emitMessage("Parsing warning ", ex);
        }

        /**
         * Logs the parse problem's location and message.
         *
         * <p>NOTE(review): the format "{} {} and column {}" renders as e.g.
         * "Continuable parsing error 12 and column 5" — the words "at line" appear to be
         * missing before the line number; confirm against upstream before changing the string.
         */
        private static void emitMessage(final String msg, final SAXParseException ex) {
            LOGGER.warn("{} {} and column {}", msg, ex.getLineNumber(), ex.getColumnNumber());
            LOGGER.warn(ex.getMessage(), ex.getException());
        }
    }

    /**
     * Unchecked wrapper used to tunnel a checked exception out of a {@link Consumer} lambda
     * (e.g. from {@link #forEachElement}); callers unwrap the cause at the call site.
     */
    private static class ConsumerException extends RuntimeException {

        ConsumerException(final Exception ex) {
            super(ex);
        }
    }
}
googleapis/google-cloud-java
35,085
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/BatchRunTestCasesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/test_case.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * The response message for * [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3beta1.TestCases.BatchRunTestCases]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse} */ public final class BatchRunTestCasesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) BatchRunTestCasesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use BatchRunTestCasesResponse.newBuilder() to construct. 
private BatchRunTestCasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchRunTestCasesResponse() { results_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchRunTestCasesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.class, com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.Builder.class); } public static final int RESULTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult> results_; /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult> getResultsList() { return results_; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder> getResultsOrBuilderList() { return results_; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ @java.lang.Override public int getResultsCount() { return results_.size(); } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult getResults(int index) { return results_.get(index); } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder getResultsOrBuilder( int index) { return results_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < results_.size(); i++) { output.writeMessage(1, results_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < results_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, results_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse other = (com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) obj; if (!getResultsList().equals(other.getResultsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getResultsCount() > 0) { hash = (37 * hash) + RESULTS_FIELD_NUMBER; hash = (53 * hash) + getResultsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } 
public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 
PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for * [TestCases.BatchRunTestCases][google.cloud.dialogflow.cx.v3beta1.TestCases.BatchRunTestCases]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.class, com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (resultsBuilder_ == null) { results_ = java.util.Collections.emptyList(); } else { results_ = null; resultsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto .internal_static_google_cloud_dialogflow_cx_v3beta1_BatchRunTestCasesResponse_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse getDefaultInstanceForType() { return 
com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse build() { com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse result = new com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse result) { if (resultsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { results_ = java.util.Collections.unmodifiableList(results_); bitField0_ = (bitField0_ & ~0x00000001); } result.results_ = results_; } else { result.results_ = resultsBuilder_.build(); } } private void buildPartial0( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public 
Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) { return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse.getDefaultInstance()) return this; if (resultsBuilder_ == null) { if (!other.results_.isEmpty()) { if (results_.isEmpty()) { results_ = other.results_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureResultsIsMutable(); results_.addAll(other.results_); } onChanged(); } } else { if (!other.results_.isEmpty()) { if (resultsBuilder_.isEmpty()) { resultsBuilder_.dispose(); resultsBuilder_ = null; results_ = other.results_; bitField0_ = (bitField0_ & ~0x00000001); resultsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getResultsFieldBuilder() : null; } else { resultsBuilder_.addAllMessages(other.results_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult m = input.readMessage( com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.parser(), extensionRegistry); if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.add(m); } else { resultsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult> results_ = java.util.Collections.emptyList(); private void ensureResultsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { results_ = new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult>( results_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder> resultsBuilder_; /** * * * <pre> * The test case results. 
The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult> getResultsList() { if (resultsBuilder_ == null) { return java.util.Collections.unmodifiableList(results_); } else { return resultsBuilder_.getMessageList(); } } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public int getResultsCount() { if (resultsBuilder_ == null) { return results_.size(); } else { return resultsBuilder_.getCount(); } } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult getResults(int index) { if (resultsBuilder_ == null) { return results_.get(index); } else { return resultsBuilder_.getMessage(index); } } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder setResults( int index, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.set(index, value); onChanged(); } else { resultsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder setResults( int index, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.set(index, builderForValue.build()); onChanged(); } else { resultsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder addResults(com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.add(value); onChanged(); } else { resultsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder addResults( int index, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult value) { if (resultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureResultsIsMutable(); results_.add(index, value); onChanged(); } else { resultsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder addResults( com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.add(builderForValue.build()); onChanged(); } else { resultsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder addResults( int index, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder builderForValue) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.add(index, builderForValue.build()); onChanged(); } else { resultsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder addAllResults( java.lang.Iterable<? 
extends com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult> values) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, results_); onChanged(); } else { resultsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder clearResults() { if (resultsBuilder_ == null) { results_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { resultsBuilder_.clear(); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public Builder removeResults(int index) { if (resultsBuilder_ == null) { ensureResultsIsMutable(); results_.remove(index); onChanged(); } else { resultsBuilder_.remove(index); } return this; } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder getResultsBuilder( int index) { return getResultsFieldBuilder().getBuilder(index); } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. 
* </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder getResultsOrBuilder( int index) { if (resultsBuilder_ == null) { return results_.get(index); } else { return resultsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public java.util.List<? extends com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder> getResultsOrBuilderList() { if (resultsBuilder_ != null) { return resultsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(results_); } } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder addResultsBuilder() { return getResultsFieldBuilder() .addBuilder(com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.getDefaultInstance()); } /** * * * <pre> * The test case results. The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder addResultsBuilder( int index) { return getResultsFieldBuilder() .addBuilder( index, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.getDefaultInstance()); } /** * * * <pre> * The test case results. 
The detailed * [conversation * turns][google.cloud.dialogflow.cx.v3beta1.TestCaseResult.conversation_turns] * are empty in this response. * </pre> * * <code>repeated .google.cloud.dialogflow.cx.v3beta1.TestCaseResult results = 1;</code> */ public java.util.List<com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder> getResultsBuilderList() { return getResultsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder> getResultsFieldBuilder() { if (resultsBuilder_ == null) { resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResult.Builder, com.google.cloud.dialogflow.cx.v3beta1.TestCaseResultOrBuilder>( results_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); results_ = null; } return resultsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse) private static final com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse(); } public static com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<BatchRunTestCasesResponse> PARSER = new com.google.protobuf.AbstractParser<BatchRunTestCasesResponse>() { @java.lang.Override public BatchRunTestCasesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchRunTestCasesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchRunTestCasesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.BatchRunTestCasesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,096
java-video-intelligence/proto-google-cloud-video-intelligence-v1/src/main/java/com/google/cloud/videointelligence/v1/AnnotateVideoResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1; /** * * * <pre> * Video annotation response. Included in the `response` * field of the `Operation` returned by the `GetOperation` * call of the `google::longrunning::Operations` service. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.AnnotateVideoResponse} */ public final class AnnotateVideoResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.AnnotateVideoResponse) AnnotateVideoResponseOrBuilder { private static final long serialVersionUID = 0L; // Use AnnotateVideoResponse.newBuilder() to construct. 
private AnnotateVideoResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AnnotateVideoResponse() { annotationResults_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AnnotateVideoResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_AnnotateVideoResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_AnnotateVideoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.AnnotateVideoResponse.class, com.google.cloud.videointelligence.v1.AnnotateVideoResponse.Builder.class); } public static final int ANNOTATION_RESULTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationResults> annotationResults_; /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationResults> getAnnotationResultsList() { return annotationResults_; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder> getAnnotationResultsOrBuilderList() { return annotationResults_; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ @java.lang.Override public int getAnnotationResultsCount() { return annotationResults_.size(); } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.VideoAnnotationResults getAnnotationResults( int index) { return annotationResults_.get(index); } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder getAnnotationResultsOrBuilder(int index) { return annotationResults_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < annotationResults_.size(); i++) { output.writeMessage(1, annotationResults_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < annotationResults_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, 
annotationResults_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1.AnnotateVideoResponse)) { return super.equals(obj); } com.google.cloud.videointelligence.v1.AnnotateVideoResponse other = (com.google.cloud.videointelligence.v1.AnnotateVideoResponse) obj; if (!getAnnotationResultsList().equals(other.getAnnotationResultsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAnnotationResultsCount() > 0) { hash = (37 * hash) + ANNOTATION_RESULTS_FIELD_NUMBER; hash = (53 * hash) + getAnnotationResultsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1.AnnotateVideoResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Video annotation response. Included in the `response` * field of the `Operation` returned by the `GetOperation` * call of the `google::longrunning::Operations` service. 
* </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.AnnotateVideoResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.AnnotateVideoResponse) com.google.cloud.videointelligence.v1.AnnotateVideoResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_AnnotateVideoResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_AnnotateVideoResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.AnnotateVideoResponse.class, com.google.cloud.videointelligence.v1.AnnotateVideoResponse.Builder.class); } // Construct using com.google.cloud.videointelligence.v1.AnnotateVideoResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (annotationResultsBuilder_ == null) { annotationResults_ = java.util.Collections.emptyList(); } else { annotationResults_ = null; annotationResultsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_AnnotateVideoResponse_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1.AnnotateVideoResponse getDefaultInstanceForType() { return 
com.google.cloud.videointelligence.v1.AnnotateVideoResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1.AnnotateVideoResponse build() { com.google.cloud.videointelligence.v1.AnnotateVideoResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1.AnnotateVideoResponse buildPartial() { com.google.cloud.videointelligence.v1.AnnotateVideoResponse result = new com.google.cloud.videointelligence.v1.AnnotateVideoResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.videointelligence.v1.AnnotateVideoResponse result) { if (annotationResultsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { annotationResults_ = java.util.Collections.unmodifiableList(annotationResults_); bitField0_ = (bitField0_ & ~0x00000001); } result.annotationResults_ = annotationResults_; } else { result.annotationResults_ = annotationResultsBuilder_.build(); } } private void buildPartial0(com.google.cloud.videointelligence.v1.AnnotateVideoResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1.AnnotateVideoResponse) { return mergeFrom((com.google.cloud.videointelligence.v1.AnnotateVideoResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.videointelligence.v1.AnnotateVideoResponse other) { if (other == com.google.cloud.videointelligence.v1.AnnotateVideoResponse.getDefaultInstance()) return this; if (annotationResultsBuilder_ == null) { if (!other.annotationResults_.isEmpty()) { if (annotationResults_.isEmpty()) { annotationResults_ = other.annotationResults_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAnnotationResultsIsMutable(); annotationResults_.addAll(other.annotationResults_); } onChanged(); } } else { if (!other.annotationResults_.isEmpty()) { if (annotationResultsBuilder_.isEmpty()) { annotationResultsBuilder_.dispose(); annotationResultsBuilder_ = null; annotationResults_ = other.annotationResults_; bitField0_ = (bitField0_ & ~0x00000001); annotationResultsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAnnotationResultsFieldBuilder() : null; } else { annotationResultsBuilder_.addAllMessages(other.annotationResults_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.videointelligence.v1.VideoAnnotationResults m = input.readMessage( com.google.cloud.videointelligence.v1.VideoAnnotationResults.parser(), extensionRegistry); if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); annotationResults_.add(m); } else { annotationResultsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationResults> annotationResults_ = java.util.Collections.emptyList(); private void ensureAnnotationResultsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { annotationResults_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1.VideoAnnotationResults>( annotationResults_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.VideoAnnotationResults, com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder, 
com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder> annotationResultsBuilder_; /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationResults> getAnnotationResultsList() { if (annotationResultsBuilder_ == null) { return java.util.Collections.unmodifiableList(annotationResults_); } else { return annotationResultsBuilder_.getMessageList(); } } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public int getAnnotationResultsCount() { if (annotationResultsBuilder_ == null) { return annotationResults_.size(); } else { return annotationResultsBuilder_.getCount(); } } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public com.google.cloud.videointelligence.v1.VideoAnnotationResults getAnnotationResults( int index) { if (annotationResultsBuilder_ == null) { return annotationResults_.get(index); } else { return annotationResultsBuilder_.getMessage(index); } } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. 
* </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder setAnnotationResults( int index, com.google.cloud.videointelligence.v1.VideoAnnotationResults value) { if (annotationResultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationResultsIsMutable(); annotationResults_.set(index, value); onChanged(); } else { annotationResultsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder setAnnotationResults( int index, com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder builderForValue) { if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); annotationResults_.set(index, builderForValue.build()); onChanged(); } else { annotationResultsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder addAnnotationResults( com.google.cloud.videointelligence.v1.VideoAnnotationResults value) { if (annotationResultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationResultsIsMutable(); annotationResults_.add(value); onChanged(); } else { annotationResultsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. 
* </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder addAnnotationResults( int index, com.google.cloud.videointelligence.v1.VideoAnnotationResults value) { if (annotationResultsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationResultsIsMutable(); annotationResults_.add(index, value); onChanged(); } else { annotationResultsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder addAnnotationResults( com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder builderForValue) { if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); annotationResults_.add(builderForValue.build()); onChanged(); } else { annotationResultsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder addAnnotationResults( int index, com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder builderForValue) { if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); annotationResults_.add(index, builderForValue.build()); onChanged(); } else { annotationResultsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder addAllAnnotationResults( java.lang.Iterable<? 
extends com.google.cloud.videointelligence.v1.VideoAnnotationResults> values) { if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationResults_); onChanged(); } else { annotationResultsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder clearAnnotationResults() { if (annotationResultsBuilder_ == null) { annotationResults_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { annotationResultsBuilder_.clear(); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public Builder removeAnnotationResults(int index) { if (annotationResultsBuilder_ == null) { ensureAnnotationResultsIsMutable(); annotationResults_.remove(index); onChanged(); } else { annotationResultsBuilder_.remove(index); } return this; } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder getAnnotationResultsBuilder(int index) { return getAnnotationResultsFieldBuilder().getBuilder(index); } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. 
* </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder getAnnotationResultsOrBuilder(int index) { if (annotationResultsBuilder_ == null) { return annotationResults_.get(index); } else { return annotationResultsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public java.util.List< ? extends com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder> getAnnotationResultsOrBuilderList() { if (annotationResultsBuilder_ != null) { return annotationResultsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(annotationResults_); } } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder addAnnotationResultsBuilder() { return getAnnotationResultsFieldBuilder() .addBuilder( com.google.cloud.videointelligence.v1.VideoAnnotationResults.getDefaultInstance()); } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. * </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder addAnnotationResultsBuilder(int index) { return getAnnotationResultsFieldBuilder() .addBuilder( index, com.google.cloud.videointelligence.v1.VideoAnnotationResults.getDefaultInstance()); } /** * * * <pre> * Annotation results for all videos specified in `AnnotateVideoRequest`. 
* </pre> * * <code> * repeated .google.cloud.videointelligence.v1.VideoAnnotationResults annotation_results = 1; * </code> */ public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder> getAnnotationResultsBuilderList() { return getAnnotationResultsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.VideoAnnotationResults, com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder, com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder> getAnnotationResultsFieldBuilder() { if (annotationResultsBuilder_ == null) { annotationResultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.VideoAnnotationResults, com.google.cloud.videointelligence.v1.VideoAnnotationResults.Builder, com.google.cloud.videointelligence.v1.VideoAnnotationResultsOrBuilder>( annotationResults_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); annotationResults_ = null; } return annotationResultsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.AnnotateVideoResponse) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.AnnotateVideoResponse) private static final com.google.cloud.videointelligence.v1.AnnotateVideoResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.AnnotateVideoResponse(); } public static com.google.cloud.videointelligence.v1.AnnotateVideoResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<AnnotateVideoResponse> PARSER = new com.google.protobuf.AbstractParser<AnnotateVideoResponse>() { @java.lang.Override public AnnotateVideoResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AnnotateVideoResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AnnotateVideoResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1.AnnotateVideoResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
35,368
substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/jdk/localization/LocalizationFeature.java
/* * Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/

package com.oracle.svm.hosted.jdk.localization;

import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.StandardCharsets;
import java.nio.charset.UnsupportedCharsetException;
import java.text.spi.BreakIteratorProvider;
import java.text.spi.CollatorProvider;
import java.text.spi.DateFormatProvider;
import java.text.spi.DateFormatSymbolsProvider;
import java.text.spi.DecimalFormatSymbolsProvider;
import java.text.spi.NumberFormatProvider;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Objects;
import java.util.ResourceBundle;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.spi.CalendarDataProvider;
import java.util.spi.CalendarNameProvider;
import java.util.spi.CurrencyNameProvider;
import java.util.spi.LocaleNameProvider;
import java.util.spi.LocaleServiceProvider;
import java.util.spi.ResourceBundleControlProvider;
import java.util.spi.TimeZoneNameProvider;
import java.util.stream.Collectors;

import org.graalvm.nativeimage.ImageSingletons;
import org.graalvm.nativeimage.Platform;
import org.graalvm.nativeimage.Platforms;
import org.graalvm.nativeimage.dynamicaccess.AccessCondition;
import org.graalvm.nativeimage.impl.RuntimeClassInitializationSupport;

import com.oracle.graal.pointsto.ObjectScanner;
import com.oracle.svm.core.ClassLoaderSupport;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.feature.AutomaticallyRegisteredFeature;
import com.oracle.svm.core.feature.InternalFeature;
import com.oracle.svm.core.jdk.localization.BundleContentSubstitutedLocalizationSupport;
import com.oracle.svm.core.jdk.localization.LocalizationSupport;
import com.oracle.svm.core.jdk.localization.OptimizedLocalizationSupport;
import com.oracle.svm.core.jdk.localization.OptimizedLocalizationSupport.AdaptersByClassKey;
import com.oracle.svm.core.jdk.localization.compression.GzipBundleCompression;
import com.oracle.svm.core.jdk.localization.substitutions.Target_sun_util_locale_provider_LocaleServiceProviderPool_OptimizedLocaleMode;
import com.oracle.svm.core.option.AccumulatingLocatableMultiOptionValue;
import com.oracle.svm.core.option.HostedOptionKey;
import com.oracle.svm.core.util.UserError;
import com.oracle.svm.core.util.VMError;
import com.oracle.svm.hosted.FeatureImpl.AfterRegistrationAccessImpl;
import com.oracle.svm.hosted.FeatureImpl.DuringAnalysisAccessImpl;
import com.oracle.svm.hosted.FeatureImpl.DuringSetupAccessImpl;
import com.oracle.svm.hosted.ImageClassLoader;
import com.oracle.svm.util.LocaleUtil;
import com.oracle.svm.util.LogUtils;

import jdk.graal.compiler.nodes.ValueNode;
import jdk.graal.compiler.nodes.graphbuilderconf.GraphBuilderContext;
import jdk.graal.compiler.nodes.graphbuilderconf.NodePlugin;
import jdk.graal.compiler.options.Option;
import jdk.graal.compiler.options.OptionStability;
import jdk.graal.compiler.options.OptionType;
import jdk.internal.access.SharedSecrets;
import jdk.vm.ci.meta.ResolvedJavaField;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.meta.ResolvedJavaType;
import sun.text.spi.JavaTimeDateTimePatternProvider;
import sun.util.cldr.CLDRLocaleProviderAdapter;
import sun.util.locale.provider.LocaleProviderAdapter;
import sun.util.locale.provider.ResourceBundleBasedAdapter;
import sun.util.resources.LocaleData;
import sun.util.resources.ParallelListResourceBundle;
import sun.util.spi.CalendarProvider;

/**
 * LocalizationFeature is the core class of SVM localization support. It contains all the options
 * that can be used to configure how localization in the resulting image should work. One can
 * specify what charsets, locales and resource bundles should be accessible. The runtime data for
 * localization is stored in an image singleton of type {@link LocalizationSupport} or one of its
 * subtypes.
 *
 * In case of ResourceBundles, one can also specify how bundles should be handled, because currently
 * there are two different modes.
 *
 * The first approach is using a simple in memory map instead of the original JDK lookup. This
 * simpler implementation leads to image size savings for smaller images such as hello world, but
 * could cause compatibility issues and maintenance overhead. It is implemented in
 * {@link OptimizedLocalizationSupport}.
 *
 * The second approach relies on the original JVM implementation instead. This approach is
 * consistent by design, which solves compatibility issues and reduces maintenance overhead.
 * Unfortunately, the default way of storing bundle data in getContents methods, see
 * {@code sun.text.resources.FormatData} for example, is not very AOT friendly. Compiling these
 * methods is time consuming and results in a bloated image (183 MB HelloWorld with all locales).
 * Therefore, the bundle content itself is again stored in the image heap by default and furthermore
 * is compressed to reduce the image size, see {@link BundleContentSubstitutedLocalizationSupport}
 * and {@link GzipBundleCompression}.
 *
 * @author d-kozak
 * @see LocalizationSupport
 * @see OptimizedLocalizationSupport
 * @see BundleContentSubstitutedLocalizationSupport
 */
@AutomaticallyRegisteredFeature
public class LocalizationFeature implements InternalFeature {

    /**
     * Locales required by default in Java.
     *
     * @see Locale#getAvailableLocales()
     */
    private static final Locale[] MINIMAL_LOCALES = new Locale[]{Locale.ROOT, Locale.ENGLISH, Locale.US};

    /** When true, the map-based {@link OptimizedLocalizationSupport} is used. */
    protected final boolean optimizedMode = Options.LocalizationOptimizedMode.getValue();

    /** When true (and not in optimized mode), bundle content is stored eagerly and substituted. */
    private final boolean substituteLoadLookup = Options.LocalizationSubstituteLoadLookup.getValue();

    /** Enables diagnostic output via {@link #trace(String)}. */
    protected final boolean trace = Options.TraceLocalizationFeature.getValue();

    /** Pool used for parallel bundle compression, or null to compress serially. */
    private final ForkJoinPool compressionPool = Options.LocalizationCompressInParallel.getValue() ? ForkJoinPool.commonPool() : null;

    /** Charset resolved from {@link Options#DefaultCharset} in {@link #afterRegistration}. */
    private Charset defaultCharset;

    /** All locales to be included in the image, computed by {@link #processLocalesOption()}. */
    protected Set<Locale> allLocales;

    /** The image singleton holding all runtime localization data. */
    protected LocalizationSupport support;

    /** Class lookup delegate supplied by the feature access object. */
    private Function<String, Class<?>> findClassByName;

    /* JDK-internal cache fields rescanned during analysis (resolved in duringSetup). */
    private Field baseLocaleCacheField;
    private Field localeCacheField;
    private Field candidatesCacheField;
    private Field localeObjectCacheMapField;
    private Field langAliasesCacheField;
    private Field parentLocalesMapField;

    @Platforms(Platform.HOSTED_ONLY.class) private ImageClassLoader imageClassLoader;

    public static class Options {
        @Option(help = "Comma separated list of bundles to be included into the image.", type = OptionType.User)//
        public static final HostedOptionKey<AccumulatingLocatableMultiOptionValue.Strings> IncludeResourceBundles = new HostedOptionKey<>(
                        AccumulatingLocatableMultiOptionValue.Strings.buildWithCommaDelimiter());

        @Option(help = "Make all hosted charsets available at run time", stability = OptionStability.STABLE)//
        public static final HostedOptionKey<Boolean> AddAllCharsets = new HostedOptionKey<>(false);

        @Option(help = "Default locale of the image, by the default it is the same as the default locale of the image builder.", type = OptionType.User, //
                        deprecated = true, deprecationMessage = "Please switch to using system properties such as '-Duser.country=CH -Duser.language=de'.")//
        public static final HostedOptionKey<String> DefaultLocale = new HostedOptionKey<>(Locale.getDefault().toLanguageTag());

        @Option(help = "Default charset of the image, by the default it is the same as the default charset of the image builder.", type = OptionType.User)//
        public static final HostedOptionKey<String> DefaultCharset = new HostedOptionKey<>(Charset.defaultCharset().name());

        @Option(help = "Comma separated list of locales to be included into the image. The default locale is included in the list automatically if not present.", type = OptionType.User, stability = OptionStability.STABLE)//
        public static final HostedOptionKey<AccumulatingLocatableMultiOptionValue.Strings> IncludeLocales = new HostedOptionKey<>(
                        AccumulatingLocatableMultiOptionValue.Strings.buildWithCommaDelimiter());

        @Option(help = "Make all hosted locales available at run time.", type = OptionType.User)//
        public static final HostedOptionKey<Boolean> IncludeAllLocales = new HostedOptionKey<>(false);

        @Option(help = "Optimize the resource bundle lookup using a simple map.", type = OptionType.User)//
        public static final HostedOptionKey<Boolean> LocalizationOptimizedMode = new HostedOptionKey<>(false);

        @Option(help = "Store the resource bundle content more efficiently in the fallback mode.", type = OptionType.User)//
        public static final HostedOptionKey<Boolean> LocalizationSubstituteLoadLookup = new HostedOptionKey<>(true);

        @Option(help = "Regular expressions matching which bundles should be compressed.", type = OptionType.User)//
        public static final HostedOptionKey<AccumulatingLocatableMultiOptionValue.Strings> LocalizationCompressBundles = new HostedOptionKey<>(AccumulatingLocatableMultiOptionValue.Strings.build());

        @Option(help = "Compress the bundles in parallel.", type = OptionType.Expert)//
        public static final HostedOptionKey<Boolean> LocalizationCompressInParallel = new HostedOptionKey<>(true);

        @Option(help = "When enabled, localization feature details are printed.", type = OptionType.Debug)//
        public static final HostedOptionKey<Boolean> TraceLocalizationFeature = new HostedOptionKey<>(false);
    }

    /**
     * Many subclasses of {@link Charset} initialize encoding and decoding tables lazily. They all
     * follow the same pattern: the methods "initc2b" and/or "initb2c" perform the initialization,
     * and then set a field "c2bInitialized" or "b2cInitialized" to true. We run the initialization
     * eagerly by creating an encoder and decoder during image generation in
     * {@link LocalizationFeature#addCharset}. So we know that the "init*" methods do nothing, and
     * we replace calls to them with nothing, i.e., remove calls to them.
     *
     * We could do all this with individual {@link Substitute method substitutions}, but it would
     * require a lot of substitution methods that all look the same.
     */
    public static final class CharsetNodePlugin implements NodePlugin {

        @Override
        public boolean handleInvoke(GraphBuilderContext b, ResolvedJavaMethod method, ValueNode[] args) {
            if ((method.getName().equals("initc2b") || method.getName().equals("initb2c")) &&
                            b.getMetaAccess().lookupJavaType(Charset.class).isAssignableFrom(method.getDeclaringClass())) {

                /*
                 * Verify that the "*Initialized" field corresponding with the method was set to
                 * true, i.e., that initialization was done eagerly.
                 */
                ResolvedJavaType charsetType = method.getDeclaringClass();
                /* "initc2b".substring(4, 7) == "c2b" -> field "c2bInitialized" (same for b2c). */
                ResolvedJavaField initializedField = findStaticField(charsetType, method.getName().substring(4, 7) + "Initialized");
                if (!b.getConstantReflection().readFieldValue(initializedField, null).asBoolean()) {
                    String charsetName = charsetType.getUnqualifiedName();
                    try {
                        Charset charset = Charset.forName(charsetName);
                        addCharset(charset);
                    } catch (UnsupportedCharsetException e) {
                        throw VMError.shouldNotReachHere("Could not find non-initialized charset " + charsetType.getSourceFileName(), e);
                    }
                }

                /* We "handled" the method invocation by doing nothing. */
                return true;
            }
            return false;
        }

        /** Looks up a static field of {@code declaringClass} by name; fails hard if absent. */
        private static ResolvedJavaField findStaticField(ResolvedJavaType declaringClass, String name) {
            for (ResolvedJavaField field : declaringClass.getStaticFields()) {
                if (field.getName().equals(name)) {
                    return field;
                }
            }
            throw VMError.shouldNotReachHereUnexpectedInput(name); // ExcludeFromJacocoGeneratedReport
        }
    }

    /**
     * Processes the localization options, selects and registers the {@link LocalizationSupport}
     * singleton, and eagerly registers charsets (and, in optimized mode, locale service providers).
     */
    @Override
    public void afterRegistration(AfterRegistrationAccess access) {
        findClassByName = access::findClassByName;
        allLocales = processLocalesOption();
        if (Options.DefaultLocale.hasBeenSet()) {
            LogUtils.warning("Option %s is deprecated and has no effect. The program's default locale is determined at run-time. " +
                            "Use %s and %s to manage the locales included in the image.%n",
                            Options.DefaultLocale.getName(), Options.IncludeLocales.getName(), Options.IncludeAllLocales.getName());
        }
        String defaultCharsetOptionValue = Options.DefaultCharset.getValue();
        try {
            defaultCharset = Charset.forName(defaultCharsetOptionValue);
            /* Charset.forName can resolve aliases; require an exact canonical-name match. */
            VMError.guarantee(defaultCharset.name().equals(defaultCharsetOptionValue), "Failed to locate charset %s, instead %s was provided", defaultCharsetOptionValue, defaultCharset.name());
        } catch (IllegalCharsetNameException | UnsupportedCharsetException ex) {
            throw UserError.abort(ex, "Invalid default charset %s", defaultCharsetOptionValue);
        }
        support = selectLocalizationSupport();
        ImageSingletons.add(LocalizationSupport.class, support);

        addCharsets();
        if (optimizedMode) {
            /*
             * Providers are only preprocessed in the optimized mode.
             */
            addProviders();
        }
        this.imageClassLoader = ((AfterRegistrationAccessImpl) access).getImageClassLoader();
    }

    /**
     * Resolves JDK-internal cache fields for later rescanning, registers the reachability callback
     * for optimized mode, and forces build-time initialization of all
     * {@link ResourceBundleControlProvider} services.
     */
    @Override
    public void duringSetup(DuringSetupAccess a) {
        DuringSetupAccessImpl access = (DuringSetupAccessImpl) a;
        if (optimizedMode) {
            access.registerObjectReachableCallback(ResourceBundle.class, this::eagerlyInitializeBundles);
        }
        langAliasesCacheField = access.findField(CLDRLocaleProviderAdapter.class, "langAliasesCache");
        parentLocalesMapField = access.findField(CLDRLocaleProviderAdapter.class, "parentLocalesMap");
        baseLocaleCacheField = access.findField("sun.util.locale.BaseLocale$1InterningCache", "CACHE");
        localeCacheField = access.findField("java.util.Locale$LocaleCache", "LOCALE_CACHE");
        /* Deliberately unresolved; guards the rescanField call in scanLocaleCache. */
        localeObjectCacheMapField = null;
        candidatesCacheField = access.findField("java.util.ResourceBundle$Control", "CANDIDATES_CACHE");

        String reason = "All ResourceBundleControlProvider that are registered as services end up as objects in the image heap, and are therefore registered to be initialized at image build time";
        ServiceLoader.load(ResourceBundleControlProvider.class).stream()
                        .forEach(provider -> ImageSingletons.lookup(RuntimeClassInitializationSupport.class).initializeAtBuildTime(provider.type(), reason));
    }

    /**
     * In the optimized localization support, the bundles are stored in a map. In order to make the
     * getContents methods unreachable, the bundles are initialized eagerly and the lookup methods
     * are substituted. However, if there are bundle instances somewhere in the heap that were not
     * put in the map, they won't be initialized and therefore accessing their content will cause
     * runtime failures. Therefore, we register a callback that notifies us for every reachable
     * {@link ResourceBundle} object in the heap, and we eagerly initialize it.
     */
    @SuppressWarnings("unused")
    private void eagerlyInitializeBundles(DuringAnalysisAccess access, ResourceBundle bundle, ObjectScanner.ScanReason reason) {
        assert optimizedMode : "Should only be triggered in the optimized mode.";
        try {
            /*
             * getKeys can be null for ResourceBundle.NONEXISTENT_BUNDLE, which causes the keySet
             * method to crash.
             */
            if (bundle.getKeys() != null) {
                bundle.keySet();
            }
        } catch (Exception ex) {
            /* Best effort: a bundle that fails to initialize is only traced, not fatal. */
            trace("Failed to eagerly initialize bundle " + bundle + ", " + bundle.getBaseBundleName() + ", reason " + ex.getClass() + " " + ex.getMessage());
        }
    }

    /**
     * Picks the {@link LocalizationSupport} implementation matching the configured mode; see the
     * class javadoc for the trade-offs between the three variants.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private LocalizationSupport selectLocalizationSupport() {
        if (optimizedMode) {
            return new OptimizedLocalizationSupport(allLocales, defaultCharset);
        } else if (substituteLoadLookup) {
            List<String> requestedPatterns = Options.LocalizationCompressBundles.getValue().values();
            return new BundleContentSubstitutedLocalizationSupport(allLocales, defaultCharset, requestedPatterns, compressionPool);
        }
        return new LocalizationSupport(allLocales, defaultCharset);
    }

    @Override
    public void beforeAnalysis(BeforeAnalysisAccess access) {
        addResourceBundles();
    }

    /** Rescans JDK locale/bundle caches so newly cached entries reach the image heap. */
    @Override
    public void duringAnalysis(DuringAnalysisAccess a) {
        DuringAnalysisAccessImpl access = (DuringAnalysisAccessImpl) a;
        scanLocaleCache(access, baseLocaleCacheField);
        scanLocaleCache(access, localeCacheField);
        scanLocaleCache(access, candidatesCacheField);
        access.rescanRoot(langAliasesCacheField);
        access.rescanRoot(parentLocalesMapField);
    }

    /** Rescans a static cache field and, when available, the cache's internal map field. */
    private void scanLocaleCache(DuringAnalysisAccessImpl access, Field cacheFieldField) {
        access.rescanRoot(cacheFieldField);
        Object localeCache;
        try {
            localeCache = cacheFieldField.get(null);
        } catch (ReflectiveOperationException ex) {
            throw VMError.shouldNotReachHere(ex);
        }
        if (localeCache != null && localeObjectCacheMapField != null) {
            access.rescanField(localeCache, localeObjectCacheMapField);
        }
    }

    /**
     * Computes the set of locales for the image: either all hosted locales or the minimal set,
     * plus every locale named in {@link Options#IncludeLocales}. Aborts on unparsable tags.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private static Set<Locale> processLocalesOption() {
        Set<Locale> locales = new HashSet<>();
        if (Options.IncludeAllLocales.getValue()) {
            Collections.addAll(locales, Locale.getAvailableLocales());
            /* Fallthrough to also allow adding custom locales */
        } else {
            Collections.addAll(locales, MINIMAL_LOCALES);
        }
        List<String> invalid = new ArrayList<>();
        for (String tag : Options.IncludeLocales.getValue().values()) {
            Locale locale = LocaleUtil.parseLocaleFromTag(tag);
            if (locale != null) {
                locales.add(locale);
            } else {
                invalid.add(tag);
            }
        }
        if (!invalid.isEmpty()) {
            throw UserError.abort("Invalid locales specified: %s", invalid);
        }
        return locales;
    }

    /**
     * The JDK performs dynamic lookup of charsets by name, which leads to dynamic class loading. We
     * cannot do that, because we need to know all classes ahead of time to perform our static
     * analysis. Therefore, we load and register all standard charsets here. Features that require
     * more than this can add additional charsets.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void addCharsets() {
        if (Options.AddAllCharsets.getValue()) {
            for (Charset c : Charset.availableCharsets().values()) {
                addCharset(c);
            }
        } else {
            addCharset(defaultCharset);
            addCharset(StandardCharsets.US_ASCII);
            addCharset(StandardCharsets.ISO_8859_1);
            addCharset(StandardCharsets.UTF_8);
            addCharset(StandardCharsets.UTF_16BE);
            addCharset(StandardCharsets.UTF_16LE);
            addCharset(StandardCharsets.UTF_16);
        }
    }

    /**
     * Registers {@code charset} (under its canonical name and all aliases, lower-cased) in the
     * {@link LocalizationSupport} charset map and eagerly triggers its coder-table initialization.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    public static void addCharset(Charset charset) {
        Map<String, Charset> charsets = ImageSingletons.lookup(LocalizationSupport.class).charsets;
        charsets.put(charset.name().toLowerCase(Locale.ROOT), charset);
        for (String name : charset.aliases()) {
            charsets.put(name.toLowerCase(Locale.ROOT), charset);
        }

        /* Eagerly initialize all the tables necessary for decoding / encoding. */
        charset.newDecoder();
        if (charset.canEncode()) {
            charset.newEncoder();
        }
    }

    /*
     * LocaleServiceProviderPool.spiClasses does not contain all the classes we need, so we list
     * them manually here.
     */
    private static final List<Class<? extends LocaleServiceProvider>> spiClasses = Arrays.asList(
                    BreakIteratorProvider.class,
                    CollatorProvider.class,
                    DateFormatProvider.class,
                    DateFormatSymbolsProvider.class,
                    DecimalFormatSymbolsProvider.class,
                    NumberFormatProvider.class,
                    CurrencyNameProvider.class,
                    LocaleNameProvider.class,
                    TimeZoneNameProvider.class,
                    JavaTimeDateTimePatternProvider.class,
                    CalendarDataProvider.class,
                    CalendarNameProvider.class,
                    CalendarProvider.class);

    /**
     * Optimized mode only: pre-populates the provider pools and the per-locale adapter maps in the
     * {@link OptimizedLocalizationSupport} for every SPI class and every candidate locale.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void addProviders() {
        OptimizedLocalizationSupport optimizedLocalizationSupport = support.asOptimizedSupport();
        for (Class<? extends LocaleServiceProvider> providerClass : spiClasses) {
            LocaleProviderAdapter adapter = Objects.requireNonNull(LocaleProviderAdapter.getAdapter(providerClass, Locale.ROOT));
            LocaleServiceProvider provider = Objects.requireNonNull(adapter.getLocaleServiceProvider(providerClass));
            optimizedLocalizationSupport.providerPools.put(providerClass, new Target_sun_util_locale_provider_LocaleServiceProviderPool_OptimizedLocaleMode(provider));
        }

        for (Locale locale : allLocales) {
            for (Locale candidateLocale : optimizedLocalizationSupport.control.getCandidateLocales("", locale)) {
                for (Class<? extends LocaleServiceProvider> providerClass : spiClasses) {
                    LocaleProviderAdapter adapter = Objects.requireNonNull(LocaleProviderAdapter.getAdapter(providerClass, candidateLocale));

                    optimizedLocalizationSupport.adaptersByClass.put(new AdaptersByClassKey(providerClass, candidateLocale), adapter);
                    LocaleProviderAdapter existing = optimizedLocalizationSupport.adaptersByType.put(adapter.getAdapterType(), adapter);
                    assert existing == null || existing == adapter : "Overwriting adapter type with a different adapter";
                }
            }
        }
    }

    /* List of getters to query `LocaleData` for resource bundles. */
    private static final List<BiFunction<LocaleData, Locale, ResourceBundle>> localeDataBundleGetters = List.of(
                    LocaleData::getCalendarData,
                    LocaleData::getCurrencyNames,
                    LocaleData::getLocaleNames,
                    LocaleData::getTimeZoneNames,
                    LocaleData::getBreakIteratorInfo,
                    LocaleData::getBreakIteratorResources,
                    LocaleData::getCollationData,
                    LocaleData::getDateFormatData,
                    LocaleData::getNumberFormatData);

    /**
     * Registers all resource bundles needed by the image: JDK locale-data bundles for every
     * included locale, a small set of always-registered bundles, and every bundle requested via
     * {@link Options#IncludeResourceBundles}.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    protected void addResourceBundles() {
        /*
         * The lookup of localized objects may require the use of more than one
         * `LocaleProviderAdapter`, so we need resource bundles from all of them.
         */
        LocaleProviderAdapter.getAdapterPreference().stream()
                        .map(LocaleProviderAdapter::forType)
                        .filter(ResourceBundleBasedAdapter.class::isInstance)
                        .map(ResourceBundleBasedAdapter.class::cast)
                        .map(ResourceBundleBasedAdapter::getLocaleData)
                        .forEach(localeData -> {
                            for (var locale : allLocales) {
                                for (var localeDataBundleGetter : localeDataBundleGetters) {
                                    ResourceBundle bundle;
                                    try {
                                        bundle = localeDataBundleGetter.apply(localeData, locale);
                                    } catch (MissingResourceException e) {
                                        /*
                                         * Locale data bundle class names do not contain underscores
                                         */
                                        String baseName = e.getClassName().split("_")[0];
                                        prepareNegativeBundle(AccessCondition.unconditional(), baseName, locale, true);
                                        continue; /* No bundle for this `locale`. */
                                    }
                                    if (bundle instanceof ParallelListResourceBundle) {
                                        /* Make sure the `bundle` content is complete. */
                                        localeData.setSupplementary((ParallelListResourceBundle) bundle);
                                    }
                                    prepareJDKBundle(bundle, locale);
                                }
                            }
                        });

        if (!optimizedMode && !substituteLoadLookup) {
            /*
             * No eager loading of bundle content, so we need to include the
             * `sun.text.resources.FormatData` bundle supplement as well.
             */
            prepareBundle(AccessCondition.unconditional(), "sun.text.resources.JavaTimeSupplementary");
        }

        final String[] alwaysRegisteredResourceBundles = new String[]{
                        "sun.util.logging.resources.logging"
        };
        for (String bundleName : alwaysRegisteredResourceBundles) {
            prepareBundle(AccessCondition.unconditional(), bundleName);
        }

        for (String bundleName : Options.IncludeResourceBundles.getValue().values()) {
            processRequestedBundle(bundleName);
        }
    }

    /**
     * Handles one user-requested bundle name. A name of the form {@code base_localeTag} is split
     * and registered for that locale only; otherwise (or if the tag is unparsable) the bundle is
     * registered for all included locales.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void processRequestedBundle(String input) {
        int splitIndex = input.indexOf('_');
        boolean specificLocaleRequested = splitIndex != -1;
        if (specificLocaleRequested) {
            /* A trailing '_' (nothing after it) maps to Locale.ROOT. */
            Locale locale = splitIndex + 1 < input.length() ? LocaleUtil.parseLocaleFromTag(input.substring(splitIndex + 1)) : Locale.ROOT;
            if (locale != null) {
                /* Get rid of locale specific suffix. */
                String baseName = input.substring(0, splitIndex);
                prepareBundle(AccessCondition.unconditional(), baseName, Collections.singletonList(locale));
                return;
            } else {
                trace("Cannot parse wanted locale " + input.substring(splitIndex + 1) + ", default will be used instead.");
            }
        }
        prepareBundle(AccessCondition.unconditional(), input, allLocales);
    }

    /**
     * Registers a class-based resource bundle given by {@code className} under {@code basename}.
     * Unknown class names are silently ignored; non-ResourceBundle classes abort the build.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void prepareClassResourceBundle(String basename, String className) {
        Class<?> bundleClass = findClassByName.apply(className);
        if (bundleClass == null) {
            /* Unknown classes are ignored */
            return;
        }
        UserError.guarantee(ResourceBundle.class.isAssignableFrom(bundleClass), "%s is not a subclass of ResourceBundle", bundleClass.getName());
        trace("Adding class based resource bundle: " + className + " " + bundleClass);
        support.registerRequiredReflectionAndResourcesForBundle(basename, Set.of(), false);
        support.prepareClassResourceBundle(basename, bundleClass);
    }

    /** Convenience overload: registers {@code baseName} for all included locales. */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void prepareBundle(AccessCondition condition, String baseName) {
        prepareBundle(condition, baseName, allLocales);
    }

    /* JDK resource packages that may have a parallel ".ext" package variant. */
    private static final String[] RESOURCE_EXTENSION_PREFIXES = new String[]{
                    "sun.text.resources.cldr",
                    "sun.util.resources.cldr",
                    "sun.text.resources",
                    "sun.util.resources"
    };

    /**
     * Registers {@code baseName} for the given locales; for JDK resource packages, the
     * corresponding ".ext" bundle is registered as well.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    public void prepareBundle(AccessCondition condition, String baseName, Collection<Locale> wantedLocales) {
        prepareBundleInternal(condition, baseName, wantedLocales);

        String alternativeBundleName = null;
        for (String resourceExtensionPrefix : RESOURCE_EXTENSION_PREFIXES) {
            if (baseName.startsWith(resourceExtensionPrefix) && !baseName.startsWith(resourceExtensionPrefix + ".ext")) {
                alternativeBundleName = baseName.replace(resourceExtensionPrefix, resourceExtensionPrefix + ".ext");
                break;
            }
        }
        if (alternativeBundleName != null) {
            prepareBundleInternal(condition, alternativeBundleName, wantedLocales);
        }
    }

    /**
     * Core bundle registration: looks the bundle up via {@link ClassLoaderSupport} for each wanted
     * locale, falls back to non-compliant class-based bundles, and finally records negative
     * lookups for all locale candidates when nothing is found.
     */
    private void prepareBundleInternal(AccessCondition condition, String baseName, Collection<Locale> wantedLocales) {
        boolean somethingFound = false;
        for (Locale locale : wantedLocales) {
            support.registerBundleLookup(condition, baseName);
            List<ResourceBundle> resourceBundle;
            try {
                resourceBundle = ImageSingletons.lookup(ClassLoaderSupport.class).getResourceBundle(baseName, locale);
            } catch (MissingResourceException mre) {
                /* Record the miss for every candidate locale so run-time lookups fail fast. */
                for (Locale candidateLocale : support.control.getCandidateLocales(baseName, locale)) {
                    prepareNegativeBundle(condition, baseName, candidateLocale, false);
                }
                continue;
            }
            somethingFound |= !resourceBundle.isEmpty();
            for (ResourceBundle bundle : resourceBundle) {
                prepareBundle(condition, baseName, bundle, locale, false);
            }
        }

        if (!somethingFound) {
            /*
             * Try non-compliant class-based bundles. These bundles can't be looked up by the normal
             * ResourceBundle lookup process, e.g. because they don't have default constructors.
             */
            Class<?> clazz = findClassByName.apply(baseName);
            if (clazz != null && ResourceBundle.class.isAssignableFrom(clazz)) {
                trace("Found non-compliant class-based bundle " + clazz);
                try {
                    support.prepareNonCompliant(clazz);
                    somethingFound = true;
                } catch (ReflectiveOperationException e) {
                    /*
                     * The bundle does not implement the getContents method, so they cannot be
                     * stored as a DelayedBundle.
                     */
                }
            }
        }

        if (!somethingFound) {
            String errorMessage = "The bundle named: " + baseName + ", has not been found. " +
                            "If the bundle is part of a module, verify the bundle name is a fully qualified class name. Otherwise " +
                            "verify the bundle path is accessible in the classpath.";
            trace(errorMessage);
            /* Register negative lookups for root, language-only, and country-specific locales. */
            prepareNegativeBundle(condition, baseName, Locale.ROOT, false);
            for (String language : wantedLocales.stream().map(Locale::getLanguage).collect(Collectors.toSet())) {
                prepareNegativeBundle(condition, baseName, Locale.of(language), false);
            }
            for (Locale locale : wantedLocales) {
                if (!locale.getCountry().isEmpty()) {
                    prepareNegativeBundle(condition, baseName, locale, false);
                }
            }
        }
    }

    /** Records a failed bundle lookup so the corresponding metadata is still registered. */
    @Platforms(Platform.HOSTED_ONLY.class)
    protected void prepareNegativeBundle(AccessCondition condition, String baseName, Locale locale, boolean jdkBundle) {
        support.registerBundleLookup(condition, baseName);
        support.registerRequiredReflectionAndResourcesForBundleAndLocale(baseName, locale, jdkBundle);
    }

    /** Registers a JDK-internal bundle instance (jdkBundle = true) under its base bundle name. */
    @Platforms(Platform.HOSTED_ONLY.class)
    protected void prepareJDKBundle(ResourceBundle bundle, Locale locale) {
        String baseName = bundle.getBaseBundleName();
        prepareBundle(AccessCondition.unconditional(), baseName, bundle, locale, true);
    }

    /**
     * Registers a concrete bundle instance together with its whole parent chain in the
     * {@link LocalizationSupport}.
     */
    @Platforms(Platform.HOSTED_ONLY.class)
    private void prepareBundle(AccessCondition condition, String bundleName, ResourceBundle bundle, Locale locale, boolean jdkBundle) {
        trace("Adding bundle " + bundleName + ", locale " + locale + " with condition " + condition);
        /*
         * Ensure that the bundle contents are loaded. We need to walk the whole bundle parent chain
         * down to the root.
         */
        for (ResourceBundle cur = bundle; cur != null; cur = SharedSecrets.getJavaUtilResourceBundleAccess().getParent(cur)) {
            /* Register all bundles with their corresponding locales */
            support.prepareBundle(bundleName, cur, this.imageClassLoader::findModule, cur.getLocale(), jdkBundle);
        }

        /*
         * Finally, register the requested bundle with the requested locale (the requested locale
         * might be more specific than the actual bundle locale).
         */
        support.prepareBundle(bundleName, bundle, this.imageClassLoader::findModule, locale, jdkBundle);
    }

    /** Prints {@code msg} to stdout when {@link Options#TraceLocalizationFeature} is enabled. */
    @Platforms(Platform.HOSTED_ONLY.class)
    protected void trace(String msg) {
        if (trace) {
            System.out.println(msg);
        }
    }
}
googleapis/google-cloud-java
35,122
java-gkehub/proto-google-cloud-gkehub-v1alpha/src/main/java/com/google/cloud/gkehub/servicemesh/v1alpha/FeatureState.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gkehub/v1alpha/servicemesh/servicemesh.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.gkehub.servicemesh.v1alpha; /** * * * <pre> * **Service Mesh**: State for the whole Hub, as analyzed by the Service Mesh * Hub Controller. * </pre> * * Protobuf type {@code google.cloud.gkehub.servicemesh.v1alpha.FeatureState} */ public final class FeatureState extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.gkehub.servicemesh.v1alpha.FeatureState) FeatureStateOrBuilder { private static final long serialVersionUID = 0L; // Use FeatureState.newBuilder() to construct. 
private FeatureState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FeatureState() { analysisMessages_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FeatureState(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto .internal_static_google_cloud_gkehub_servicemesh_v1alpha_FeatureState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto .internal_static_google_cloud_gkehub_servicemesh_v1alpha_FeatureState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.class, com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.Builder.class); } public static final int ANALYSIS_MESSAGES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage> analysisMessages_; /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage> getAnalysisMessagesList() { return analysisMessages_; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder> getAnalysisMessagesOrBuilderList() { return analysisMessages_; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public int getAnalysisMessagesCount() { return analysisMessages_.size(); } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage getAnalysisMessages( int index) { return analysisMessages_.get(index); } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder getAnalysisMessagesOrBuilder(int index) { return analysisMessages_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < analysisMessages_.size(); i++) { output.writeMessage(1, analysisMessages_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < analysisMessages_.size(); i++) { size 
+= com.google.protobuf.CodedOutputStream.computeMessageSize(1, analysisMessages_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState)) { return super.equals(obj); } com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState other = (com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState) obj; if (!getAnalysisMessagesList().equals(other.getAnalysisMessagesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAnalysisMessagesCount() > 0) { hash = (37 * hash) + ANALYSIS_MESSAGES_FIELD_NUMBER; hash = (53 * hash) + getAnalysisMessagesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * **Service Mesh**: State for the whole Hub, as analyzed by the Service Mesh * Hub Controller. * </pre> * * Protobuf type {@code google.cloud.gkehub.servicemesh.v1alpha.FeatureState} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.servicemesh.v1alpha.FeatureState) com.google.cloud.gkehub.servicemesh.v1alpha.FeatureStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto .internal_static_google_cloud_gkehub_servicemesh_v1alpha_FeatureState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto .internal_static_google_cloud_gkehub_servicemesh_v1alpha_FeatureState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.class, 
com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.Builder.class); } // Construct using com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (analysisMessagesBuilder_ == null) { analysisMessages_ = java.util.Collections.emptyList(); } else { analysisMessages_ = null; analysisMessagesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto .internal_static_google_cloud_gkehub_servicemesh_v1alpha_FeatureState_descriptor; } @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState getDefaultInstanceForType() { return com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.getDefaultInstance(); } @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState build() { com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState buildPartial() { com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState result = new com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState result) { if (analysisMessagesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { analysisMessages_ = java.util.Collections.unmodifiableList(analysisMessages_); bitField0_ = (bitField0_ & ~0x00000001); } result.analysisMessages_ = 
analysisMessages_; } else { result.analysisMessages_ = analysisMessagesBuilder_.build(); } } private void buildPartial0(com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState) { return mergeFrom((com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState other) { if (other == com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState.getDefaultInstance()) return this; if (analysisMessagesBuilder_ == null) { if (!other.analysisMessages_.isEmpty()) { if (analysisMessages_.isEmpty()) { analysisMessages_ = other.analysisMessages_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAnalysisMessagesIsMutable(); analysisMessages_.addAll(other.analysisMessages_); } onChanged(); } } else { if (!other.analysisMessages_.isEmpty()) { if 
(analysisMessagesBuilder_.isEmpty()) { analysisMessagesBuilder_.dispose(); analysisMessagesBuilder_ = null; analysisMessages_ = other.analysisMessages_; bitField0_ = (bitField0_ & ~0x00000001); analysisMessagesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getAnalysisMessagesFieldBuilder() : null; } else { analysisMessagesBuilder_.addAllMessages(other.analysisMessages_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage m = input.readMessage( com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.parser(), extensionRegistry); if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); analysisMessages_.add(m); } else { analysisMessagesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage> analysisMessages_ = java.util.Collections.emptyList(); private void ensureAnalysisMessagesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { analysisMessages_ = new java.util.ArrayList<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>( analysisMessages_); bitField0_ |= 
0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder> analysisMessagesBuilder_; /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage> getAnalysisMessagesList() { if (analysisMessagesBuilder_ == null) { return java.util.Collections.unmodifiableList(analysisMessages_); } else { return analysisMessagesBuilder_.getMessageList(); } } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public int getAnalysisMessagesCount() { if (analysisMessagesBuilder_ == null) { return analysisMessages_.size(); } else { return analysisMessagesBuilder_.getCount(); } } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage getAnalysisMessages( int index) { if (analysisMessagesBuilder_ == null) { return analysisMessages_.get(index); } else { return analysisMessagesBuilder_.getMessage(index); } } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. 
* </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAnalysisMessages( int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) { if (analysisMessagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnalysisMessagesIsMutable(); analysisMessages_.set(index, value); onChanged(); } else { analysisMessagesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setAnalysisMessages( int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) { if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); analysisMessages_.set(index, builderForValue.build()); onChanged(); } else { analysisMessagesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder addAnalysisMessages( com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) { if (analysisMessagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnalysisMessagesIsMutable(); analysisMessages_.add(value); onChanged(); } else { analysisMessagesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. 
* </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder addAnalysisMessages( int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) { if (analysisMessagesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnalysisMessagesIsMutable(); analysisMessages_.add(index, value); onChanged(); } else { analysisMessagesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder addAnalysisMessages( com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) { if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); analysisMessages_.add(builderForValue.build()); onChanged(); } else { analysisMessagesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder addAnalysisMessages( int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) { if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); analysisMessages_.add(index, builderForValue.build()); onChanged(); } else { analysisMessagesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. 
* </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder addAllAnalysisMessages( java.lang.Iterable<? extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage> values) { if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, analysisMessages_); onChanged(); } else { analysisMessagesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearAnalysisMessages() { if (analysisMessagesBuilder_ == null) { analysisMessages_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { analysisMessagesBuilder_.clear(); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder removeAnalysisMessages(int index) { if (analysisMessagesBuilder_ == null) { ensureAnalysisMessagesIsMutable(); analysisMessages_.remove(index); onChanged(); } else { analysisMessagesBuilder_.remove(index); } return this; } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder getAnalysisMessagesBuilder(int index) { return getAnalysisMessagesFieldBuilder().getBuilder(index); } /** * * * <pre> * Output only. 
Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder getAnalysisMessagesOrBuilder(int index) { if (analysisMessagesBuilder_ == null) { return analysisMessages_.get(index); } else { return analysisMessagesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public java.util.List< ? extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder> getAnalysisMessagesOrBuilderList() { if (analysisMessagesBuilder_ != null) { return analysisMessagesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(analysisMessages_); } } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder addAnalysisMessagesBuilder() { return getAnalysisMessagesFieldBuilder() .addBuilder( com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.getDefaultInstance()); } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. 
* </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder addAnalysisMessagesBuilder(int index) { return getAnalysisMessagesFieldBuilder() .addBuilder( index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.getDefaultInstance()); } /** * * * <pre> * Output only. Results of running Service Mesh analyzers. * </pre> * * <code> * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder> getAnalysisMessagesBuilderList() { return getAnalysisMessagesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder> getAnalysisMessagesFieldBuilder() { if (analysisMessagesBuilder_ == null) { analysisMessagesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>( analysisMessages_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); analysisMessages_ = null; } return analysisMessagesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.gkehub.servicemesh.v1alpha.FeatureState) } // @@protoc_insertion_point(class_scope:google.cloud.gkehub.servicemesh.v1alpha.FeatureState) private static final com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState(); } public static com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FeatureState> PARSER = new com.google.protobuf.AbstractParser<FeatureState>() { @java.lang.Override public FeatureState parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FeatureState> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FeatureState> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.gkehub.servicemesh.v1alpha.FeatureState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
35,278
corba/src/share/classes/com/sun/corba/se/impl/interceptors/RequestInfoImpl.java
/* * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.corba.se.impl.interceptors; import java.io.IOException ; import java.lang.reflect.Method ; import java.lang.reflect.InvocationTargetException ; import java.util.HashMap ; import org.omg.PortableInterceptor.ForwardRequest; import org.omg.PortableInterceptor.InvalidSlot; import org.omg.PortableInterceptor.RequestInfo; import org.omg.PortableInterceptor.LOCATION_FORWARD; import org.omg.IOP.TaggedProfile; import org.omg.IOP.TaggedComponent; import org.omg.IOP.ServiceContextHelper; import org.omg.Messaging.SYNC_WITH_TRANSPORT; import org.omg.CORBA.ParameterMode; import org.omg.CORBA.Any; import org.omg.CORBA.BAD_INV_ORDER; import org.omg.CORBA.BAD_PARAM; import org.omg.CORBA.CompletionStatus; import org.omg.CORBA.Context; import org.omg.CORBA.ContextList; import org.omg.CORBA.CTX_RESTRICT_SCOPE; import org.omg.CORBA.ExceptionList; import org.omg.CORBA.INTERNAL; import org.omg.CORBA.LocalObject; import org.omg.CORBA.NamedValue; import org.omg.CORBA.NO_IMPLEMENT; import org.omg.CORBA.NO_RESOURCES; import org.omg.CORBA.NVList; import org.omg.CORBA.Object; import org.omg.CORBA.Policy; import org.omg.CORBA.SystemException; import org.omg.CORBA.TypeCode; import org.omg.CORBA.UNKNOWN; import org.omg.CORBA.UserException; import org.omg.CORBA.portable.ApplicationException; import org.omg.CORBA.portable.Delegate; import org.omg.CORBA.portable.InputStream; import org.omg.Dynamic.Parameter; import com.sun.corba.se.spi.legacy.connection.Connection; import com.sun.corba.se.spi.legacy.interceptor.RequestInfoExt; import com.sun.corba.se.spi.ior.IOR; import com.sun.corba.se.spi.ior.iiop.GIOPVersion; import com.sun.corba.se.spi.orb.ORB; import com.sun.corba.se.spi.logging.CORBALogDomains; import com.sun.corba.se.spi.servicecontext.ServiceContexts; import com.sun.corba.se.spi.servicecontext.UnknownServiceContext; import com.sun.corba.se.impl.encoding.CDRInputStream_1_0; import com.sun.corba.se.impl.encoding.EncapsOutputStream; import 
com.sun.corba.se.impl.orbutil.ORBUtility; import com.sun.corba.se.impl.util.RepositoryId; import com.sun.corba.se.impl.logging.InterceptorsSystemException; import com.sun.corba.se.impl.logging.OMGSystemException; import sun.corba.SharedSecrets;

/**
 * Implementation of the RequestInfo interface as specified in
 * orbos/99-12-02 section 5.4.1.
 *
 * Base class shared by the client- and server-side request info
 * implementations; abstract accessors are filled in by
 * ClientRequestInfoImpl and ServerRequestInfoImpl.
 */
public abstract class RequestInfoImpl
    extends LocalObject
    implements RequestInfo, RequestInfoExt
{
    //////////////////////////////////////////////////////////////////////
    //
    // NOTE: IF AN ATTRIBUTE IS ADDED, PLEASE UPDATE RESET();
    //
    //////////////////////////////////////////////////////////////////////

    // The ORB from which to get PICurrent and other info
    protected ORB myORB;

    // Exception wrappers for logging/raising standard system exceptions.
    protected InterceptorsSystemException wrapper ;
    protected OMGSystemException stdWrapper ;

    // The number of interceptors actually invoked for this client request.
    // See setFlowStackIndex for a detailed description.
    protected int flowStackIndex = 0;

    // The type of starting point call to make to the interceptors
    // See ClientRequestInfoImpl and ServerRequestInfoImpl for a list of
    // appropriate constants.
    protected int startingPointCall;

    // The type of intermediate point call to make to the interceptors
    // See ServerRequestInfoImpl for a list of appropriate constants.
    // This does not currently apply to client request interceptors but is
    // here in case intermediate points are introduced in the future.
    protected int intermediatePointCall;

    // The type of ending point call to make to the interceptors
    // See ClientRequestInfoImpl and ServerRequestInfoImpl for a list of
    // appropriate constants.
    protected int endingPointCall;

    // The reply status to return in reply_status.  This is initialized
    // to UNINITIALIZED so that we can tell if this has been set or not.
    protected short replyStatus = UNINITIALIZED;

    // Constant for an uninitialized reply status.
    protected static final short UNINITIALIZED = -1;

    // Which points we are currently executing (so we can implement the
    // validity table).
    protected int currentExecutionPoint;
    protected static final int EXECUTION_POINT_STARTING = 0;
    protected static final int EXECUTION_POINT_INTERMEDIATE = 1;
    protected static final int EXECUTION_POINT_ENDING = 2;

    // Set to true if all interceptors have had all their points
    // executed.
    protected boolean alreadyExecuted;

    // Sources of request information
    protected Connection connection;
    protected ServiceContexts serviceContexts;

    // The ForwardRequest object if this request is being forwarded.
    // Either the forwardRequest or the forwardRequestIOR field is set.
    // When set, the other field is set to null initially.  If the other
    // field is queried, it is lazily calculated and cached.  These
    // two attributes are always kept in sync.
    protected ForwardRequest forwardRequest;
    protected IOR forwardRequestIOR;

    // PICurrent's SlotTable
    protected SlotTable slotTable;

    // The exception to be returned by received_exception and
    // received_exception_id
    protected Exception exception;

    //////////////////////////////////////////////////////////////////////
    //
    // NOTE: IF AN ATTRIBUTE IS ADDED, PLEASE UPDATE RESET();
    //
    //////////////////////////////////////////////////////////////////////

    /**
     * Reset the info object so that it can be reused for a retry,
     * for example.
     */
    void reset() {

        // Please keep these in the same order as declared above.

        flowStackIndex = 0;
        startingPointCall = 0;
        intermediatePointCall = 0;
        endingPointCall = 0;
        // 6763340
        setReplyStatus( UNINITIALIZED ) ;
        currentExecutionPoint = EXECUTION_POINT_STARTING;
        alreadyExecuted = false;
        connection = null;
        serviceContexts = null;
        forwardRequest = null;
        forwardRequestIOR = null;
        exception = null;

        // We don't need to reset the Slots because they are
        // already in the clean state after receive_<point> interceptor
        // are called.
    }

    /*
     **********************************************************************
     * Access protection
     **********************************************************************/

    // Method IDs for all methods in RequestInfo.  This allows for a
    // convenient O(1) lookup for checkAccess().
    protected static final int MID_REQUEST_ID = 0;
    protected static final int MID_OPERATION = 1;
    protected static final int MID_ARGUMENTS = 2;
    protected static final int MID_EXCEPTIONS = 3;
    protected static final int MID_CONTEXTS = 4;
    protected static final int MID_OPERATION_CONTEXT = 5;
    protected static final int MID_RESULT = 6;
    protected static final int MID_RESPONSE_EXPECTED = 7;
    protected static final int MID_SYNC_SCOPE = 8;
    protected static final int MID_REPLY_STATUS = 9;
    protected static final int MID_FORWARD_REFERENCE = 10;
    protected static final int MID_GET_SLOT = 11;
    protected static final int MID_GET_REQUEST_SERVICE_CONTEXT = 12;
    protected static final int MID_GET_REPLY_SERVICE_CONTEXT = 13;

    // The last value from RequestInfo (be sure to update this):
    protected static final int MID_RI_LAST = 13;

    /*
     **********************************************************************
     * Public interfaces
     **********************************************************************/

    /**
     * Creates a new RequestInfoImpl object.
     */
    public RequestInfoImpl( ORB myORB ) {
        super();

        this.myORB = myORB;
        wrapper = InterceptorsSystemException.get( myORB,
            CORBALogDomains.RPC_PROTOCOL ) ;
        stdWrapper = OMGSystemException.get( myORB,
            CORBALogDomains.RPC_PROTOCOL ) ;

        // Capture the current TSC and make it the RSC of this request.
        PICurrent current = (PICurrent)(myORB.getPIHandler().getPICurrent());
        slotTable = current.getSlotTable( );
    }

    /**
     * Implementation for request_id() differs for client and server
     * implementations.
     *
     * Uniquely identifies an active request/reply sequence.  Once a
     * request/reply sequence is concluded this ID may be reused.  (this
     * is NOT necessarily the same as the GIOP request_id).
     */
    abstract public int request_id ();

    /**
     * Implementation for operation() differs for client and server
     * implementations.
     *
     * The name of the operation being invoked.
     */
    abstract public String operation ();

    /**
     * This method returns the list of arguments for the operation that was
     * invoked. It raises NO_RESOURCES exception if the operation is not invoked
     * by using DII mechanism.
     */
    abstract public Parameter[] arguments ();

    /**
     * This method returns the list of exceptions that was raised when the
     * operation was invoked. It raises NO_RESOURCES exception if the operation
     * is not invoked by using DII mechanism.
     */
    abstract public TypeCode[] exceptions ();

    /**
     * This method returns the list of contexts for the DII operation.
     * It raises NO_RESOURCES exception if the operation is not invoked by
     * using DII mechanism.
     */
    abstract public String[] contexts ();

    /**
     * This method returns the list of operation_context for the DII operation.
     * It raises NO_RESOURCES exception if the operation is not invoked by
     * using DII mechanism.
     */
    abstract public String[] operation_context ();

    /**
     * This method returns the result from the invoked DII operation.
     * It raises NO_RESOURCES exception if the operation is not invoked by
     * using DII mechanism.
     */
    abstract public Any result ();

    /**
     * Implementation for response_expected() differs for client and server
     * implementations.
     *
     * Indicates whether a response is expected.  On the client, a reply is
     * not returned when response_expected is false, so receive_reply cannot
     * be called.  receive_other is called unless an exception occurs, in
     * which case receive_exception is called.  On the client, within
     * send_poll, this attribute is true.
     */
    abstract public boolean response_expected ();

    /**
     * Defined in the Messaging specification.  Pertinent only when
     * response_expected is false.  If response_expected is true, the value
     * of sync_scope is undefined.  It defines how far the request shall
     * progress before control is returned to the client.  This attribute may
     * have one of the following values:
     * <ul>
     *   <li>Messaging::SYNC_NONE</li>
     *   <li>Messaging::SYNC_WITH_TRANSPORT</li>
     *   <li>Messaging::SYNC_WITH_SERVER</li>
     *   <li>Messaging::SYNC_WITH_TARGET</li>
     * </ul>
     */
    public short sync_scope (){
        checkAccess( MID_SYNC_SCOPE );
        return SYNC_WITH_TRANSPORT.value; // REVISIT - get from MessageMediator
    }

    /**
     * Describes the state of the result of the operation invocation.  Its
     * value can be one of the following:
     * <ul>
     *   <li>PortableInterceptor::SUCCESSFUL</li>
     *   <li>PortableInterceptor::SYSTEM_EXCEPTION</li>
     *   <li>PortableInterceptor::USER_EXCEPTION</li>
     *   <li>PortableInterceptor::LOCATION_FORWARD</li>
     *   <li>PortableInterceptor::TRANSPORT_RETRY</li>
     * </ul>
     */
    public short reply_status (){
        checkAccess( MID_REPLY_STATUS );
        return replyStatus;
    }

    /**
     * Implementation for forward_reference() differs for client and server
     * implementations.
     *
     * If the reply_status attribute is LOCATION_FORWARD
     * then this attribute will contain the object
     * to which the request will be forwarded.  It is indeterminate whether a
     * forwarded request will actually occur.
     */
    abstract public Object forward_reference ();

    /**
     * Returns the data from the given slot of the PortableInterceptor::Current
     * that is in the scope of the request.
     * <p>
     * If the given slot has not been set, then an any containing a type code
     * with a TCKind value of tk_null is returned.
     * <p>
     * If the ID does not define an allocated slot, InvalidSlot is raised.
     */
    public Any get_slot (int id) throws InvalidSlot {
        // access is currently valid for all states:
        //checkAccess( MID_GET_SLOT );
        // Delegate the call to the slotTable which was set when RequestInfo was
        // created.
        return slotTable.get_slot( id );
    }

    /**
     * Implementation for get_request_service_context() differs for client
     * and server implementations.
     *
     * This operation returns a copy of the service context with the given ID
     * that is associated with the request.  If the request's service context
     * does not contain an entry for that ID, BAD_PARAM with a minor code of
     * TBD_BP is raised.
     */
    abstract public org.omg.IOP.ServiceContext
        get_request_service_context(int id);

    /**
     * Implementation for get_reply_service_context() differs for client
     * and server implementations.
     *
     * This operation returns a copy of the service context with the given ID
     * that is associated with the reply.  IF the request's service context
     * does not contain an entry for that ID, BAD_PARAM with a minor code of
     * TBD_BP is raised.
     */
    abstract public org.omg.IOP.ServiceContext
        get_reply_service_context (int id);

    // NOTE: When adding a method, be sure to:
    // 1. Add a MID_* constant for that method
    // 2. Call checkAccess at the start of the method
    // 3. Define entries in the validCall[][] table for interception points
    //    in both ClientRequestInfoImpl and ServerRequestInfoImpl.

    /*
     **********************************************************************
     * Proprietary methods
     **********************************************************************/

    /**
     * @return The connection on which the request is made.
     *
     * Note: we store the connection as an internal type but
     * expose it here as an external type.
     */
    public com.sun.corba.se.spi.legacy.connection.Connection connection() {
        return connection;
    }

    /*
     **********************************************************************
     * Private utility methods
     **********************************************************************/

    /**
     * Inserts the UserException inside the given ApplicationException
     * into the given Any.  Throws an UNKNOWN with minor code
     * OMGSystemException.UNKNOWN_USER_EXCEPTION if the Helper class could
     * not be found to insert it with.
     */
    private void insertApplicationException( ApplicationException appException,
                                             Any result )
        throws UNKNOWN
    {
        try {
            // Extract the UserException from the ApplicationException.
            // Look up class name from repository id:
            RepositoryId repId = RepositoryId.cache.getId(
                appException.getId() );
            String className = repId.getClassName();

            // Find the read method on the helper class:
            String helperClassName = className + "Helper";
            Class<?> helperClass =
                SharedSecrets.getJavaCorbaAccess().loadClass( helperClassName );
            Class[] readParams = new Class[1];
            readParams[0] = org.omg.CORBA.portable.InputStream.class;
            Method readMethod = helperClass.getMethod( "read", readParams );

            // Invoke the read method, passing in the input stream to
            // retrieve the user exception.  Mark and reset the stream
            // as to not disturb it.
            InputStream ueInputStream = appException.getInputStream();
            ueInputStream.mark( 0 );
            UserException userException = null;
            try {
                java.lang.Object[] readArguments = new java.lang.Object[1];
                readArguments[0] = ueInputStream;
                userException = (UserException)readMethod.invoke(
                    null, readArguments );
            }
            finally {
                try {
                    // Always restore the stream position, even if read fails.
                    ueInputStream.reset();
                }
                catch( IOException e ) {
                    throw wrapper.markAndResetFailed( e ) ;
                }
            }

            // Insert this UserException into the provided Any using the
            // helper class.
            insertUserException( userException, result );
        }
        catch( ClassNotFoundException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
        catch( NoSuchMethodException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
        catch( SecurityException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
        catch( IllegalAccessException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
        catch( IllegalArgumentException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
        catch( InvocationTargetException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e ) ;
        }
    }

    /**
     * Inserts the UserException into the given Any.
     * Throws an UNKNOWN with minor code
     * OMGSystemException.UNKNOWN_USER_EXCEPTION if the Helper class could
     * not be found to insert it with.
     */
    private void insertUserException( UserException userException, Any result )
        throws UNKNOWN
    {
        try {
            // Insert this UserException into the provided Any using the
            // helper class.
            if( userException != null ) {
                Class exceptionClass = userException.getClass();
                String className = exceptionClass.getName();
                String helperClassName = className + "Helper";
                Class<?> helperClass =
                    SharedSecrets.getJavaCorbaAccess().loadClass(
                        helperClassName );

                // Find insert( Any, class ) method
                Class[] insertMethodParams = new Class[2];
                insertMethodParams[0] = org.omg.CORBA.Any.class;
                insertMethodParams[1] = exceptionClass;
                Method insertMethod = helperClass.getMethod(
                    "insert", insertMethodParams );

                // Call helper.insert( result, userException ):
                java.lang.Object[] insertMethodArguments =
                    new java.lang.Object[2];
                insertMethodArguments[0] = result;
                insertMethodArguments[1] = userException;
                insertMethod.invoke( null, insertMethodArguments );
            }
        }
        catch( ClassNotFoundException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
        catch( NoSuchMethodException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
        catch( SecurityException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
        catch( IllegalAccessException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
        catch( IllegalArgumentException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
        catch( InvocationTargetException e ) {
            throw stdWrapper.unknownUserException(
                CompletionStatus.COMPLETED_MAYBE, e );
        }
    }

    /*
     **********************************************************************
     * Protected utility methods
     **********************************************************************/

    /**
     * Internal utility method to convert an NVList into a PI Parameter[]
     */
    protected Parameter[] nvListToParameterArray( NVList parNVList ) {

        // _REVISIT_ This utility method should probably be doing a deep
        // copy so interceptor can't accidentally change the arguments.

        int count = parNVList.count();
        Parameter[] plist = new Parameter[count];
        try {
            for( int i = 0; i < count; i++ ) {
                Parameter p = new Parameter();
                plist[i] = p;
                NamedValue nv = parNVList.item( i );
                plist[i].argument = nv.value();
                // ParameterMode spec can be found in 99-10-07.pdf
                // Section:10.5.22
                // nv.flags spec can be found in 99-10-07.pdf
                // Section 7.1.1
                // nv.flags has ARG_IN as 1, ARG_OUT as 2 and ARG_INOUT as 3
                // To convert this into enum PARAM_IN, PARAM_OUT and
                // PARAM_INOUT the value is subtracted by 1.
                plist[i].mode = ParameterMode.from_int( nv.flags() - 1 );
            }
        } catch ( Exception e ) {
            throw wrapper.exceptionInArguments( e ) ;
        }

        return plist;
    }

    /**
     * Utility to wrap the given Exception in an Any object and return it.
     * If the exception is a UserException which cannot be inserted into
     * an any, then this returns an Any containing the system exception
     * UNKNOWN.
     */
    protected Any exceptionToAny( Exception exception ){
        Any result = myORB.create_any();

        if( exception == null ) {
            // Note: exception should never be null here since we will throw
            // a BAD_INV_ORDER if this is not called from receive_exception.
            throw wrapper.exceptionWasNull2() ;
        } else if( exception instanceof SystemException ) {
            ORBUtility.insertSystemException(
                (SystemException)exception, result );
        } else if( exception instanceof ApplicationException ) {
            // Use the Helper class for this exception to insert it into an
            // Any.
            try {
                // Insert the user exception inside the application exception
                // into the Any result:
                ApplicationException appException =
                    (ApplicationException)exception;
                insertApplicationException( appException, result );
            } catch( UNKNOWN e ) {
                // As per ptc/00-08-06, 21.3.13.4.  if we cannot find the
                // appropriate class, then return an any containing UNKNOWN,
                // with a minor code of 1.  This is conveniently the same
                // exception that is returned from the
                // insertApplicationException utility method.
                ORBUtility.insertSystemException( e, result );
            }
        } else if( exception instanceof UserException ) {
            try {
                UserException userException = (UserException)exception;
                insertUserException( userException, result );
            } catch( UNKNOWN e ) {
                ORBUtility.insertSystemException( e, result );
            }
        }

        return result;
    }

    /**
     * Utility method to look up a service context with the given id and
     * convert it to an IOP.ServiceContext.  Uses the given HashMap as
     * a cache.  If not found in cache, the result is inserted in the cache.
     */
    protected org.omg.IOP.ServiceContext getServiceContext(
        HashMap cachedServiceContexts,
        ServiceContexts serviceContexts, int id )
    {
        org.omg.IOP.ServiceContext result = null;
        Integer integerId = new Integer( id );

        // Search cache first:
        result = (org.omg.IOP.ServiceContext)
            cachedServiceContexts.get( integerId );

        // null could normally mean that either we cached the value null
        // or it's not in the cache.  However, there is no way for us to
        // cache the value null in the following code.
        if( result == null ) {
            // Not in cache.  Find it and put in cache.
            // Get the desired "core" service context.
            com.sun.corba.se.spi.servicecontext.ServiceContext context =
                serviceContexts.get( id );
            if (context == null)
                throw stdWrapper.invalidServiceContextId() ;

            // Convert the "core" service context to an
            // "IOP" ServiceContext by writing it to a
            // CDROutputStream and reading it back.
            EncapsOutputStream out =
                sun.corba.OutputStreamFactory.newEncapsOutputStream(myORB);
            context.write( out, GIOPVersion.V1_2 );
            InputStream inputStream = out.create_input_stream();
            result = ServiceContextHelper.read( inputStream );

            cachedServiceContexts.put( integerId, result );
        }

        // Good citizen: For increased efficiency, we assume that interceptors
        // will not modify the returned ServiceContext.  Otherwise, we would
        // have to make a deep copy.

        return result;
    }

    /**
     * Utility method to add an IOP.ServiceContext to a core.ServiceContexts
     * object.  If replace is true, any service context with the given id
     * is replaced.
     * <p>
     * Raises BAD_INV_ORDER if replace is false and a service context with
     * the given id already exists.
     * <p>
     * Uses the given HashMap as a cache.  If a service context is placed
     * in the container, it goes in the HashMap as well.
     */
    protected void addServiceContext(
        HashMap cachedServiceContexts,
        ServiceContexts serviceContexts,
        org.omg.IOP.ServiceContext service_context,
        boolean replace )
    {
        int id = 0 ;

        // Convert IOP.service_context to core.ServiceContext:
        EncapsOutputStream outputStream =
            sun.corba.OutputStreamFactory.newEncapsOutputStream(myORB);
        InputStream inputStream = null;
        UnknownServiceContext coreServiceContext = null;
        ServiceContextHelper.write( outputStream, service_context );
        inputStream = outputStream.create_input_stream();

        // Constructor expects id to already have been read from stream.
        coreServiceContext = new UnknownServiceContext(
            inputStream.read_long(),
            (org.omg.CORBA_2_3.portable.InputStream)inputStream );

        id = coreServiceContext.getId();

        if (serviceContexts.get(id) != null)
            if (replace)
                serviceContexts.delete( id );
            else
                throw stdWrapper.serviceContextAddFailed( new Integer(id) ) ;

        serviceContexts.put( coreServiceContext );

        // Place IOP.ServiceContext in cache as well:
        cachedServiceContexts.put( new Integer( id ), service_context );
    }

    /**
     * Sets the number of interceptors whose starting interception
     * points were successfully invoked on this client call.  As specified
     * in orbos/99-12-02, section 5.2.1., not all interceptors will
     * be invoked if a ForwardRequest exception or a system exception
     * is raised.  This keeps track of how many were successfully executed
     * so we know not to execute the corresponding ending interception
     * points for the interceptors whose starting interception points
     * were not completed.  This simulates the "Flow Stack Visual Model"
     * presented in section 5.1.3.
     */
    protected void setFlowStackIndex(int num ) {
        this.flowStackIndex = num;
    }

    /**
     * Returns the number of interceptors whose starting interception
     * points were actually invoked on this client request.  See
     * setFlowStackIndex for more details.
     */
    protected int getFlowStackIndex() {
        return this.flowStackIndex;
    }

    /**
     * Sets which ending interception point should be called
     * for each interceptor in the virtual flow stack.
     */
    protected void setEndingPointCall( int call ) {
        this.endingPointCall = call;
    }

    /**
     * Retrieves the current ending point call type (see
     * setEndingPointCall for more details).
     */
    protected int getEndingPointCall() {
        return this.endingPointCall;
    }

    /**
     * Sets which intermediate interception point should be called
     * for each interceptor in the virtual flow stack.
     */
    protected void setIntermediatePointCall( int call ) {
        this.intermediatePointCall = call;
    }

    /**
     * Retrieves the current intermediate point call type (see
     * setEndingPointCall for more details).
     */
    protected int getIntermediatePointCall() {
        return this.intermediatePointCall;
    }

    /**
     * Sets which starting interception point should be called
     * for each interceptor in the virtual flow stack.
     */
    protected void setStartingPointCall( int call ) {
        this.startingPointCall = call;
    }

    /**
     * Retrieves the current starting point call type (see
     * setStartingPointCall for more details).
     */
    protected int getStartingPointCall() {
        return this.startingPointCall;
    }

    /**
     * Returns true if all interceptors' starting and ending points
     * have already executed to completion, or false if not yet.
     */
    protected boolean getAlreadyExecuted() {
        return this.alreadyExecuted;
    }

    /**
     * Sets whether all interceptors' starting and ending points
     * have already been executed to completion.
     */
    protected void setAlreadyExecuted( boolean alreadyExecuted ) {
        this.alreadyExecuted = alreadyExecuted;
    }

    /**
     * Sets the value to be returned by reply_status
     */
    protected void setReplyStatus( short replyStatus ) {
        this.replyStatus = replyStatus;
    }

    /**
     * Gets the current reply_status without doing an access check
     * (available only to package and subclasses)
     */
    protected short getReplyStatus() {
        return this.replyStatus;
    }

    /**
     * Stores the given ForwardRequest object for later analysis.
     * This version supplements setForwardRequest( IOR );
     */
    protected void setForwardRequest( ForwardRequest forwardRequest ) {
        this.forwardRequest = forwardRequest;
        this.forwardRequestIOR = null;
    }

    /**
     * Stores the given IOR for later forward request analysis.
     * This version supplements setForwardRequest( ForwardRequest );
     */
    protected void setForwardRequest( IOR ior ) {
        this.forwardRequestIOR = ior;
        this.forwardRequest = null;
    }

    /**
     * Retrieves the ForwardRequest object as a ForwardRequest exception.
     */
    protected ForwardRequest getForwardRequestException() {
        if( this.forwardRequest == null ) {
            if( this.forwardRequestIOR != null ) {
                // Convert the internal IOR to a forward request exception
                // by creating an object reference.
                org.omg.CORBA.Object obj = iorToObject(this.forwardRequestIOR);
                this.forwardRequest = new ForwardRequest( obj );
            }
        }

        return this.forwardRequest;
    }

    /**
     * Retrieves the IOR of the ForwardRequest exception.
     */
    protected IOR getForwardRequestIOR() {
        if( this.forwardRequestIOR == null ) {
            if( this.forwardRequest != null ) {
                this.forwardRequestIOR = ORBUtility.getIOR(
                    this.forwardRequest.forward ) ;
            }
        }

        return this.forwardRequestIOR;
    }

    /**
     * Sets the exception to be returned by received_exception and
     * received_exception_id.
     */
    protected void setException( Exception exception ) {
        this.exception = exception;
    }

    /**
     * Returns the exception to be returned by received_exception and
     * received_exception_id.
     */
    Exception getException() {
        return this.exception;
    }

    /**
     * Sets the execution point that we are currently executing
     * (starting points, intermediate points, or ending points).
     * This allows us to enforce the validity table.
     */
    protected void setCurrentExecutionPoint( int executionPoint ) {
        this.currentExecutionPoint = executionPoint;
    }

    /**
     * Check whether the caller is allowed to access this method at
     * this particular time.  This is overridden in subclasses to implement
     * the validity table specified in ptc/00-04-05, table 21-1 and 21-2.
     * The currentExecutionPoint attribute is checked, and if access is
     * forbidden at this time, BAD_INV_ORDER is raised with a minor code of
     * TBD_BIO.
     *
     * @param methodID The ID of this method, one of the MID_* constants.
     *     This allows us to easily look up the method access in a table.
     *     Note that method ids may overlap between subclasses.
     */
    protected abstract void checkAccess( int methodID )
        throws BAD_INV_ORDER;

    /**
     * The server side does an explicit set rather than taking the
     * current PICurrent table as is done in the general RequestInfoImpl
     * constructor.
     */
    void setSlotTable(SlotTable slotTable) {
        this.slotTable = slotTable;
    }

    /**
     * Converts the given internal IOR into a CORBA object reference.
     */
    protected org.omg.CORBA.Object iorToObject( IOR ior ) {
        return ORBUtility.makeObjectReference( ior ) ;
    }
}
google/schemaorg-java
35,099
src/main/java/com/google/schemaorg/core/impl/OfferImpl.java
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.schemaorg.SchemaOrgType; import com.google.schemaorg.SchemaOrgTypeImpl; import com.google.schemaorg.ValueType; import com.google.schemaorg.core.datatype.Date; import com.google.schemaorg.core.datatype.DateTime; import com.google.schemaorg.core.datatype.Number; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.GoogConstants; import com.google.schemaorg.goog.PopularityScoreSpecification; /** Implementation of {@link Offer}. 
*/ public class OfferImpl extends IntangibleImpl implements Offer { private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet(); private static ImmutableSet<String> initializePropertySet() { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); builder.add(CoreConstants.PROPERTY_ACCEPTED_PAYMENT_METHOD); builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE); builder.add(CoreConstants.PROPERTY_ADD_ON); builder.add(CoreConstants.PROPERTY_ADVANCE_BOOKING_REQUIREMENT); builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING); builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME); builder.add(CoreConstants.PROPERTY_AREA_SERVED); builder.add(CoreConstants.PROPERTY_AVAILABILITY); builder.add(CoreConstants.PROPERTY_AVAILABILITY_ENDS); builder.add(CoreConstants.PROPERTY_AVAILABILITY_STARTS); builder.add(CoreConstants.PROPERTY_AVAILABLE_AT_OR_FROM); builder.add(CoreConstants.PROPERTY_AVAILABLE_DELIVERY_METHOD); builder.add(CoreConstants.PROPERTY_BUSINESS_FUNCTION); builder.add(CoreConstants.PROPERTY_CATEGORY); builder.add(CoreConstants.PROPERTY_DELIVERY_LEAD_TIME); builder.add(CoreConstants.PROPERTY_DESCRIPTION); builder.add(CoreConstants.PROPERTY_ELIGIBLE_CUSTOMER_TYPE); builder.add(CoreConstants.PROPERTY_ELIGIBLE_DURATION); builder.add(CoreConstants.PROPERTY_ELIGIBLE_QUANTITY); builder.add(CoreConstants.PROPERTY_ELIGIBLE_REGION); builder.add(CoreConstants.PROPERTY_ELIGIBLE_TRANSACTION_VOLUME); builder.add(CoreConstants.PROPERTY_GTIN12); builder.add(CoreConstants.PROPERTY_GTIN13); builder.add(CoreConstants.PROPERTY_GTIN14); builder.add(CoreConstants.PROPERTY_GTIN8); builder.add(CoreConstants.PROPERTY_IMAGE); builder.add(CoreConstants.PROPERTY_INCLUDES_OBJECT); builder.add(CoreConstants.PROPERTY_INELIGIBLE_REGION); builder.add(CoreConstants.PROPERTY_INVENTORY_LEVEL); builder.add(CoreConstants.PROPERTY_ITEM_CONDITION); builder.add(CoreConstants.PROPERTY_ITEM_OFFERED); builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE); 
// Tail of the generated property-set initializer: registers the remaining
// schema.org core properties, plus the Google-extension properties, that an
// Offer recognizes. (The start of this initializer lies above this chunk.)
builder.add(CoreConstants.PROPERTY_MPN);
builder.add(CoreConstants.PROPERTY_NAME);
builder.add(CoreConstants.PROPERTY_OFFERED_BY);
builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
builder.add(CoreConstants.PROPERTY_PRICE);
builder.add(CoreConstants.PROPERTY_PRICE_CURRENCY);
builder.add(CoreConstants.PROPERTY_PRICE_SPECIFICATION);
builder.add(CoreConstants.PROPERTY_PRICE_VALID_UNTIL);
builder.add(CoreConstants.PROPERTY_REVIEW);
builder.add(CoreConstants.PROPERTY_REVIEWS);
builder.add(CoreConstants.PROPERTY_SAME_AS);
builder.add(CoreConstants.PROPERTY_SELLER);
builder.add(CoreConstants.PROPERTY_SERIAL_NUMBER);
builder.add(CoreConstants.PROPERTY_SKU);
builder.add(CoreConstants.PROPERTY_URL);
builder.add(CoreConstants.PROPERTY_VALID_FROM);
builder.add(CoreConstants.PROPERTY_VALID_THROUGH);
builder.add(CoreConstants.PROPERTY_WARRANTY);
builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
return builder.build();
}

/**
 * Generated builder for {@code Offer}. Every {@code addX(...)} overload
 * forwards to {@code addProperty(propertyName, value)} on the shared base
 * builder: {@code *.Builder} overloads call {@code build()} first, and plain
 * {@code String} overloads wrap the argument as a schema.org {@code Text}.
 */
static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<Offer.Builder>
    implements Offer.Builder {

  @Override public Offer.Builder addAcceptedPaymentMethod(PaymentMethod value) { return addProperty(CoreConstants.PROPERTY_ACCEPTED_PAYMENT_METHOD, value); }
  @Override public Offer.Builder addAcceptedPaymentMethod(String value) { return addProperty(CoreConstants.PROPERTY_ACCEPTED_PAYMENT_METHOD, Text.of(value)); }

  @Override public Offer.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); }
  @Override public Offer.Builder addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); }

  @Override public Offer.Builder addAddOn(Offer value) { return addProperty(CoreConstants.PROPERTY_ADD_ON, value); }
  @Override public Offer.Builder addAddOn(Offer.Builder value) { return addProperty(CoreConstants.PROPERTY_ADD_ON, value.build()); }
  @Override public Offer.Builder addAddOn(String value) { return addProperty(CoreConstants.PROPERTY_ADD_ON, Text.of(value)); }

  @Override public Offer.Builder addAdvanceBookingRequirement(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_ADVANCE_BOOKING_REQUIREMENT, value); }
  @Override public Offer.Builder addAdvanceBookingRequirement(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_ADVANCE_BOOKING_REQUIREMENT, value.build()); }
  @Override public Offer.Builder addAdvanceBookingRequirement(String value) { return addProperty(CoreConstants.PROPERTY_ADVANCE_BOOKING_REQUIREMENT, Text.of(value)); }

  @Override public Offer.Builder addAggregateRating(AggregateRating value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value); }
  @Override public Offer.Builder addAggregateRating(AggregateRating.Builder value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build()); }
  @Override public Offer.Builder addAggregateRating(String value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value)); }

  @Override public Offer.Builder addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); }
  @Override public Offer.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); }

  // areaServed accepts several schema.org types; each overload stores under the same property.
  @Override public Offer.Builder addAreaServed(AdministrativeArea value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
  @Override public Offer.Builder addAreaServed(AdministrativeArea.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
  @Override public Offer.Builder addAreaServed(GeoShape value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
  @Override public Offer.Builder addAreaServed(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
  @Override public Offer.Builder addAreaServed(Place value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
  @Override public Offer.Builder addAreaServed(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
  @Override public Offer.Builder addAreaServed(Text value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
  @Override public Offer.Builder addAreaServed(String value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value)); }

  @Override public Offer.Builder addAvailability(ItemAvailability value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY, value); }
  @Override public Offer.Builder addAvailability(String value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY, Text.of(value)); }

  @Override public Offer.Builder addAvailabilityEnds(DateTime value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY_ENDS, value); }
  @Override public Offer.Builder addAvailabilityEnds(String value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY_ENDS, Text.of(value)); }

  @Override public Offer.Builder addAvailabilityStarts(DateTime value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY_STARTS, value); }
  @Override public Offer.Builder addAvailabilityStarts(String value) { return addProperty(CoreConstants.PROPERTY_AVAILABILITY_STARTS, Text.of(value)); }

  @Override public Offer.Builder addAvailableAtOrFrom(Place value) { return addProperty(CoreConstants.PROPERTY_AVAILABLE_AT_OR_FROM, value); }
  @Override public Offer.Builder addAvailableAtOrFrom(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_AVAILABLE_AT_OR_FROM, value.build()); }
  @Override public Offer.Builder addAvailableAtOrFrom(String value) { return addProperty(CoreConstants.PROPERTY_AVAILABLE_AT_OR_FROM, Text.of(value)); }

  @Override public Offer.Builder addAvailableDeliveryMethod(DeliveryMethod value) { return addProperty(CoreConstants.PROPERTY_AVAILABLE_DELIVERY_METHOD, value); }
  @Override public Offer.Builder addAvailableDeliveryMethod(String value) { return addProperty(CoreConstants.PROPERTY_AVAILABLE_DELIVERY_METHOD, Text.of(value)); }

  @Override public Offer.Builder addBusinessFunction(BusinessFunction value) { return addProperty(CoreConstants.PROPERTY_BUSINESS_FUNCTION, value); }
  @Override public Offer.Builder addBusinessFunction(String value) { return addProperty(CoreConstants.PROPERTY_BUSINESS_FUNCTION, Text.of(value)); }

  @Override public Offer.Builder addCategory(PhysicalActivityCategory value) { return addProperty(CoreConstants.PROPERTY_CATEGORY, value); }
  @Override public Offer.Builder addCategory(Text value) { return addProperty(CoreConstants.PROPERTY_CATEGORY, value); }
  @Override public Offer.Builder addCategory(Thing value) { return addProperty(CoreConstants.PROPERTY_CATEGORY, value); }
  @Override public Offer.Builder addCategory(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_CATEGORY, value.build()); }
  @Override public Offer.Builder addCategory(String value) { return addProperty(CoreConstants.PROPERTY_CATEGORY, Text.of(value)); }

  @Override public Offer.Builder addDeliveryLeadTime(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_DELIVERY_LEAD_TIME, value); }
  @Override public Offer.Builder addDeliveryLeadTime(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_DELIVERY_LEAD_TIME, value.build()); }
  @Override public Offer.Builder addDeliveryLeadTime(String value) { return addProperty(CoreConstants.PROPERTY_DELIVERY_LEAD_TIME, Text.of(value)); }

  @Override public Offer.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); }
  @Override public Offer.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); }

  @Override public Offer.Builder addEligibleCustomerType(BusinessEntityType value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_CUSTOMER_TYPE, value); }
  @Override public Offer.Builder addEligibleCustomerType(String value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_CUSTOMER_TYPE, Text.of(value)); }

  @Override public Offer.Builder addEligibleDuration(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_DURATION, value); }
  @Override public Offer.Builder addEligibleDuration(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_DURATION, value.build()); }
  @Override public Offer.Builder addEligibleDuration(String value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_DURATION, Text.of(value)); }

  @Override public Offer.Builder addEligibleQuantity(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_QUANTITY, value); }
  @Override public Offer.Builder addEligibleQuantity(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_QUANTITY, value.build()); }
  @Override public Offer.Builder addEligibleQuantity(String value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_QUANTITY, Text.of(value)); }

  @Override public Offer.Builder addEligibleRegion(GeoShape value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, value); }
  @Override public Offer.Builder addEligibleRegion(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, value.build()); }
  @Override public Offer.Builder addEligibleRegion(Place value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, value); }
  @Override public Offer.Builder addEligibleRegion(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, value.build()); }
  @Override public Offer.Builder addEligibleRegion(Text value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, value); }
  @Override public Offer.Builder addEligibleRegion(String value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION, Text.of(value)); }

  @Override public Offer.Builder addEligibleTransactionVolume(PriceSpecification value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_TRANSACTION_VOLUME, value); }
  @Override public Offer.Builder addEligibleTransactionVolume(PriceSpecification.Builder value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_TRANSACTION_VOLUME, value.build()); }
  @Override public Offer.Builder addEligibleTransactionVolume(String value) { return addProperty(CoreConstants.PROPERTY_ELIGIBLE_TRANSACTION_VOLUME, Text.of(value)); }

  // GTIN identifiers of the various lengths.
  @Override public Offer.Builder addGtin12(Text value) { return addProperty(CoreConstants.PROPERTY_GTIN12, value); }
  @Override public Offer.Builder addGtin12(String value) { return addProperty(CoreConstants.PROPERTY_GTIN12, Text.of(value)); }
  @Override public Offer.Builder addGtin13(Text value) { return addProperty(CoreConstants.PROPERTY_GTIN13, value); }
  @Override public Offer.Builder addGtin13(String value) { return addProperty(CoreConstants.PROPERTY_GTIN13, Text.of(value)); }
  @Override public Offer.Builder addGtin14(Text value) { return addProperty(CoreConstants.PROPERTY_GTIN14, value); }
  @Override public Offer.Builder addGtin14(String value) { return addProperty(CoreConstants.PROPERTY_GTIN14, Text.of(value)); }
  @Override public Offer.Builder addGtin8(Text value) { return addProperty(CoreConstants.PROPERTY_GTIN8, value); }
  @Override public Offer.Builder addGtin8(String value) { return addProperty(CoreConstants.PROPERTY_GTIN8, Text.of(value)); }

  @Override public Offer.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
  @Override public Offer.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); }
  @Override public Offer.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
  @Override public Offer.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); }

  @Override public Offer.Builder addIncludesObject(TypeAndQuantityNode value) { return addProperty(CoreConstants.PROPERTY_INCLUDES_OBJECT, value); }
  @Override public Offer.Builder addIncludesObject(TypeAndQuantityNode.Builder value) { return addProperty(CoreConstants.PROPERTY_INCLUDES_OBJECT, value.build()); }
  @Override public Offer.Builder addIncludesObject(String value) { return addProperty(CoreConstants.PROPERTY_INCLUDES_OBJECT, Text.of(value)); }

  @Override public Offer.Builder addIneligibleRegion(GeoShape value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, value); }
  @Override public Offer.Builder addIneligibleRegion(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, value.build()); }
  @Override public Offer.Builder addIneligibleRegion(Place value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, value); }
  @Override public Offer.Builder addIneligibleRegion(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, value.build()); }
  @Override public Offer.Builder addIneligibleRegion(Text value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, value); }
  @Override public Offer.Builder addIneligibleRegion(String value) { return addProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION, Text.of(value)); }

  @Override public Offer.Builder addInventoryLevel(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_INVENTORY_LEVEL, value); }
  @Override public Offer.Builder addInventoryLevel(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_INVENTORY_LEVEL, value.build()); }
  @Override public Offer.Builder addInventoryLevel(String value) { return addProperty(CoreConstants.PROPERTY_INVENTORY_LEVEL, Text.of(value)); }

  @Override public Offer.Builder addItemCondition(OfferItemCondition value) { return addProperty(CoreConstants.PROPERTY_ITEM_CONDITION, value); }
  @Override public Offer.Builder addItemCondition(String value) { return addProperty(CoreConstants.PROPERTY_ITEM_CONDITION, Text.of(value)); }

  @Override public Offer.Builder addItemOffered(Product value) { return addProperty(CoreConstants.PROPERTY_ITEM_OFFERED, value); }
  @Override public Offer.Builder addItemOffered(Product.Builder value) { return addProperty(CoreConstants.PROPERTY_ITEM_OFFERED, value.build()); }
  @Override public Offer.Builder addItemOffered(Service value) { return addProperty(CoreConstants.PROPERTY_ITEM_OFFERED, value); }
  @Override public Offer.Builder addItemOffered(Service.Builder value) { return addProperty(CoreConstants.PROPERTY_ITEM_OFFERED, value.build()); }
  @Override public Offer.Builder addItemOffered(String value) { return addProperty(CoreConstants.PROPERTY_ITEM_OFFERED, Text.of(value)); }

  @Override public Offer.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
  @Override public Offer.Builder addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); }
  @Override public Offer.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
  @Override public Offer.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); }

  @Override public Offer.Builder addMpn(Text value) { return addProperty(CoreConstants.PROPERTY_MPN, value); }
  @Override public Offer.Builder addMpn(String value) { return addProperty(CoreConstants.PROPERTY_MPN, Text.of(value)); }

  @Override public Offer.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); }
  @Override public Offer.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); }

  @Override public Offer.Builder addOfferedBy(Organization value) { return addProperty(CoreConstants.PROPERTY_OFFERED_BY, value); }
  @Override public Offer.Builder addOfferedBy(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_OFFERED_BY, value.build()); }
  @Override public Offer.Builder addOfferedBy(Person value) { return addProperty(CoreConstants.PROPERTY_OFFERED_BY, value); }
  @Override public Offer.Builder addOfferedBy(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_OFFERED_BY, value.build()); }
  @Override public Offer.Builder addOfferedBy(String value) { return addProperty(CoreConstants.PROPERTY_OFFERED_BY, Text.of(value)); }

  @Override public Offer.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); }
  @Override public Offer.Builder addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); }
  @Override public Offer.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); }

  @Override public Offer.Builder addPrice(Number value) { return addProperty(CoreConstants.PROPERTY_PRICE, value); }
  @Override public Offer.Builder addPrice(Text value) { return addProperty(CoreConstants.PROPERTY_PRICE, value); }
  @Override public Offer.Builder addPrice(String value) { return addProperty(CoreConstants.PROPERTY_PRICE, Text.of(value)); }

  @Override public Offer.Builder addPriceCurrency(Text value) { return addProperty(CoreConstants.PROPERTY_PRICE_CURRENCY, value); }
  @Override public Offer.Builder addPriceCurrency(String value) { return addProperty(CoreConstants.PROPERTY_PRICE_CURRENCY, Text.of(value)); }

  @Override public Offer.Builder addPriceSpecification(PriceSpecification value) { return addProperty(CoreConstants.PROPERTY_PRICE_SPECIFICATION, value); }
  @Override public Offer.Builder addPriceSpecification(PriceSpecification.Builder value) { return addProperty(CoreConstants.PROPERTY_PRICE_SPECIFICATION, value.build()); }
  @Override public Offer.Builder addPriceSpecification(String value) { return addProperty(CoreConstants.PROPERTY_PRICE_SPECIFICATION, Text.of(value)); }

  @Override public Offer.Builder addPriceValidUntil(Date value) { return addProperty(CoreConstants.PROPERTY_PRICE_VALID_UNTIL, value); }
  @Override public Offer.Builder addPriceValidUntil(String value) { return addProperty(CoreConstants.PROPERTY_PRICE_VALID_UNTIL, Text.of(value)); }

  @Override public Offer.Builder addReview(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value); }
  @Override public Offer.Builder addReview(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value.build()); }
  @Override public Offer.Builder addReview(String value) { return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value)); }

  @Override public Offer.Builder addReviews(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value); }
  @Override public Offer.Builder addReviews(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build()); }
  @Override public Offer.Builder addReviews(String value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value)); }

  @Override public Offer.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); }
  @Override public Offer.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); }

  @Override public Offer.Builder addSeller(Organization value) { return addProperty(CoreConstants.PROPERTY_SELLER, value); }
  @Override public Offer.Builder addSeller(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_SELLER, value.build()); }
  @Override public Offer.Builder addSeller(Person value) { return addProperty(CoreConstants.PROPERTY_SELLER, value); }
  @Override public Offer.Builder addSeller(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_SELLER, value.build()); }
  @Override public Offer.Builder addSeller(String value) { return addProperty(CoreConstants.PROPERTY_SELLER, Text.of(value)); }

  @Override public Offer.Builder addSerialNumber(Text value) { return addProperty(CoreConstants.PROPERTY_SERIAL_NUMBER, value); }
  @Override public Offer.Builder addSerialNumber(String value) { return addProperty(CoreConstants.PROPERTY_SERIAL_NUMBER, Text.of(value)); }

  @Override public Offer.Builder addSku(Text value) { return addProperty(CoreConstants.PROPERTY_SKU, value); }
  @Override public Offer.Builder addSku(String value) { return addProperty(CoreConstants.PROPERTY_SKU, Text.of(value)); }

  @Override public Offer.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); }
  @Override public Offer.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); }

  @Override public Offer.Builder addValidFrom(DateTime value) { return addProperty(CoreConstants.PROPERTY_VALID_FROM, value); }
  @Override public Offer.Builder addValidFrom(String value) { return addProperty(CoreConstants.PROPERTY_VALID_FROM, Text.of(value)); }

  @Override public Offer.Builder addValidThrough(DateTime value) { return addProperty(CoreConstants.PROPERTY_VALID_THROUGH, value); }
  @Override public Offer.Builder addValidThrough(String value) { return addProperty(CoreConstants.PROPERTY_VALID_THROUGH, Text.of(value)); }

  @Override public Offer.Builder addWarranty(WarrantyPromise value) { return addProperty(CoreConstants.PROPERTY_WARRANTY, value); }
  @Override public Offer.Builder addWarranty(WarrantyPromise.Builder value) { return addProperty(CoreConstants.PROPERTY_WARRANTY, value.build()); }
  @Override public Offer.Builder addWarranty(String value) { return addProperty(CoreConstants.PROPERTY_WARRANTY, Text.of(value)); }

  // Google-extension properties (GoogConstants namespace).
  @Override public Offer.Builder addDetailedDescription(Article value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); }
  @Override public Offer.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); }
  @Override public Offer.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); }
  @Override public
Offer.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); }
  @Override public Offer.Builder addPopularityScore(PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); }
  @Override public Offer.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); }

  /** Materializes the accumulated properties into an immutable {@code Offer}. */
  @Override
  public Offer build() {
    return new OfferImpl(properties, reverseMap);
  }
}

// Constructed by BuilderImpl.build(); the maps are managed by the superclass.
public OfferImpl(Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
  super(properties, reverseMap);
}

@Override
public String getFullTypeName() {
  return CoreConstants.TYPE_OFFER;
}

// Accepts the property either fully qualified under the core or Google
// namespace, or as a bare name already present in PROPERTY_SET.
@Override
public boolean includesProperty(String property) {
  return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
      || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
      || PROPERTY_SET.contains(property);
}

// Generated getters: each returns all values stored under one property key.
@Override public ImmutableList<SchemaOrgType> getAcceptedPaymentMethodList() { return getProperty(CoreConstants.PROPERTY_ACCEPTED_PAYMENT_METHOD); }
@Override public ImmutableList<SchemaOrgType> getAddOnList() { return getProperty(CoreConstants.PROPERTY_ADD_ON); }
@Override public ImmutableList<SchemaOrgType> getAdvanceBookingRequirementList() { return getProperty(CoreConstants.PROPERTY_ADVANCE_BOOKING_REQUIREMENT); }
@Override public ImmutableList<SchemaOrgType> getAggregateRatingList() { return getProperty(CoreConstants.PROPERTY_AGGREGATE_RATING); }
@Override public ImmutableList<SchemaOrgType> getAreaServedList() { return getProperty(CoreConstants.PROPERTY_AREA_SERVED); }
@Override public ImmutableList<SchemaOrgType> getAvailabilityList() { return getProperty(CoreConstants.PROPERTY_AVAILABILITY); }
@Override public ImmutableList<SchemaOrgType> getAvailabilityEndsList() { return getProperty(CoreConstants.PROPERTY_AVAILABILITY_ENDS); }
@Override public ImmutableList<SchemaOrgType> getAvailabilityStartsList() { return getProperty(CoreConstants.PROPERTY_AVAILABILITY_STARTS); }
@Override public ImmutableList<SchemaOrgType> getAvailableAtOrFromList() { return getProperty(CoreConstants.PROPERTY_AVAILABLE_AT_OR_FROM); }
@Override public ImmutableList<SchemaOrgType> getAvailableDeliveryMethodList() { return getProperty(CoreConstants.PROPERTY_AVAILABLE_DELIVERY_METHOD); }
@Override public ImmutableList<SchemaOrgType> getBusinessFunctionList() { return getProperty(CoreConstants.PROPERTY_BUSINESS_FUNCTION); }
@Override public ImmutableList<SchemaOrgType> getCategoryList() { return getProperty(CoreConstants.PROPERTY_CATEGORY); }
@Override public ImmutableList<SchemaOrgType> getDeliveryLeadTimeList() { return getProperty(CoreConstants.PROPERTY_DELIVERY_LEAD_TIME); }
@Override public ImmutableList<SchemaOrgType> getEligibleCustomerTypeList() { return getProperty(CoreConstants.PROPERTY_ELIGIBLE_CUSTOMER_TYPE); }
@Override public ImmutableList<SchemaOrgType> getEligibleDurationList() { return getProperty(CoreConstants.PROPERTY_ELIGIBLE_DURATION); }
@Override public ImmutableList<SchemaOrgType> getEligibleQuantityList() { return getProperty(CoreConstants.PROPERTY_ELIGIBLE_QUANTITY); }
@Override public ImmutableList<SchemaOrgType> getEligibleRegionList() { return getProperty(CoreConstants.PROPERTY_ELIGIBLE_REGION); }
@Override public ImmutableList<SchemaOrgType> getEligibleTransactionVolumeList() { return getProperty(CoreConstants.PROPERTY_ELIGIBLE_TRANSACTION_VOLUME); }
@Override public ImmutableList<SchemaOrgType> getGtin12List() { return getProperty(CoreConstants.PROPERTY_GTIN12); }
@Override public ImmutableList<SchemaOrgType> getGtin13List() { return getProperty(CoreConstants.PROPERTY_GTIN13); }
@Override public ImmutableList<SchemaOrgType> getGtin14List() { return getProperty(CoreConstants.PROPERTY_GTIN14); }
@Override public ImmutableList<SchemaOrgType> getGtin8List() { return getProperty(CoreConstants.PROPERTY_GTIN8); }
@Override public ImmutableList<SchemaOrgType> getIncludesObjectList() { return getProperty(CoreConstants.PROPERTY_INCLUDES_OBJECT); }
@Override public ImmutableList<SchemaOrgType> getIneligibleRegionList() { return getProperty(CoreConstants.PROPERTY_INELIGIBLE_REGION); }
@Override public ImmutableList<SchemaOrgType> getInventoryLevelList() { return getProperty(CoreConstants.PROPERTY_INVENTORY_LEVEL); }
@Override public ImmutableList<SchemaOrgType> getItemConditionList() { return getProperty(CoreConstants.PROPERTY_ITEM_CONDITION); }
@Override public ImmutableList<SchemaOrgType> getItemOfferedList() { return getProperty(CoreConstants.PROPERTY_ITEM_OFFERED); }
@Override public ImmutableList<SchemaOrgType> getMpnList() { return getProperty(CoreConstants.PROPERTY_MPN); }
@Override public ImmutableList<SchemaOrgType> getOfferedByList() { return getProperty(CoreConstants.PROPERTY_OFFERED_BY); }
@Override public ImmutableList<SchemaOrgType> getPriceList() { return getProperty(CoreConstants.PROPERTY_PRICE); }
@Override public ImmutableList<SchemaOrgType> getPriceCurrencyList() { return getProperty(CoreConstants.PROPERTY_PRICE_CURRENCY); }
@Override public ImmutableList<SchemaOrgType> getPriceSpecificationList() { return getProperty(CoreConstants.PROPERTY_PRICE_SPECIFICATION); }
@Override public ImmutableList<SchemaOrgType> getPriceValidUntilList() { return getProperty(CoreConstants.PROPERTY_PRICE_VALID_UNTIL); }
@Override public ImmutableList<SchemaOrgType> getReviewList() { return getProperty(CoreConstants.PROPERTY_REVIEW); }
@Override public ImmutableList<SchemaOrgType> getReviewsList() { return getProperty(CoreConstants.PROPERTY_REVIEWS); }
@Override public ImmutableList<SchemaOrgType> getSellerList() { return getProperty(CoreConstants.PROPERTY_SELLER); }
@Override public ImmutableList<SchemaOrgType> getSerialNumberList() { return getProperty(CoreConstants.PROPERTY_SERIAL_NUMBER); }
@Override public ImmutableList<SchemaOrgType> getSkuList() { return getProperty(CoreConstants.PROPERTY_SKU); }
@Override public ImmutableList<SchemaOrgType> getValidFromList() { return getProperty(CoreConstants.PROPERTY_VALID_FROM); }
@Override public ImmutableList<SchemaOrgType> getValidThroughList() { return getProperty(CoreConstants.PROPERTY_VALID_THROUGH); }
@Override public ImmutableList<SchemaOrgType> getWarrantyList() { return getProperty(CoreConstants.PROPERTY_WARRANTY); }
}
apache/tez
35,091
tez-mapreduce/src/test/java/org/apache/hadoop/mapred/split/TestGroupedSplits.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapred.split;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;

import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.apache.commons.lang.mutable.MutableInt;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.tez.common.MockDNSToSwitchMapping;
import org.apache.tez.mapreduce.grouper.TezSplitGrouper;

import com.google.common.collect.Sets;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests for Tez split grouping over the old-style {@code mapred} API:
 * {@link TezGroupedSplitsInputFormat} wrapping {@link TextInputFormat},
 * plus direct grouping via {@code TezMapredSplitsGrouper}.
 */
public class TestGroupedSplits {
  private static final Logger LOG =
    LoggerFactory.getLogger(TestGroupedSplits.class);

  private static JobConf defaultConf = new JobConf();
  private static FileSystem localFs = null;

  static {
    try {
      // All tests run against the local filesystem under workDir.
      defaultConf.set("fs.defaultFS", "file:///");
      localFs = FileSystem.getLocal(defaultConf);
    } catch (IOException e) {
      throw new RuntimeException("init failure", e);
    }
  }

  @SuppressWarnings("deprecation")
  private static Path workDir =
    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
             "TestCombineTextInputFormat").makeQualified(localFs);

  // A reporter that does nothing
  private static final Reporter voidReporter = Reporter.NULL;

  /**
   * Writes many small files of distinct integers, then verifies that with a
   * desired split count of 1 the grouped format produces exactly one
   * {@link TezGroupedSplit} and that reading it back yields every key exactly
   * once (tracked via a BitSet).
   */
  @Test(timeout=10000)
  public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);
    Random random = new Random();
    long seed = random.nextLong();
    // seed is logged so a failing run can be reproduced
    LOG.info("seed = "+seed);
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    createFiles(length, numFiles, random);

    // create a combined split for the files
    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable , Text> format =
        new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setDesiredNumberOfSplits(1);
    format.setInputFormat(wrappedFormat);
    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
      int numSplits = random.nextInt(length/20)+1;
      LOG.info("splitting: requesting = " + numSplits);
      InputSplit[] splits = format.getSplits(job, numSplits);
      LOG.info("splitting: got =        " + splits.length);

      // we should have a single split as the length is comfortably smaller than
      // the block size
      assertEquals("We got more than one splits!", 1, splits.length);
      InputSplit split = splits[0];
      assertEquals("It should be TezGroupedSplit",
        TezGroupedSplit.class, split.getClass());

      // check the split: every value 0..length-1 must appear exactly once
      BitSet bits = new BitSet(length);
      LOG.debug("split= " + split);
      RecordReader<LongWritable, Text> reader =
        format.getRecordReader(split, job, voidReporter);
      try {
        int count = 0;
        while (reader.next(key, value)) {
          int v = Integer.parseInt(value.toString());
          LOG.debug("read " + v);
          if (bits.get(v)) {
            LOG.warn("conflict with " + v +
                     " at position "+reader.getPos());
          }
          assertFalse("Key in multiple partitions.", bits.get(v));
          bits.set(v);
          count++;
        }
        LOG.info("splits="+split+" count=" + count);
      } finally {
        reader.close();
      }
      assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
  }

  /** Half-open value range [start, end) covered by one generated test file. */
  private static class Range {
    private final int start;
    private final int end;

    Range(int start, int end) {
      this.start = start;
      this.end = end;
    }

    @Override
    public String toString() {
      return "(" + start + ", " + end + ")";
    }
  }

  // Partitions [0, length) into numFiles contiguous ranges of randomized
  // sizes; the last range always ends exactly at length.
  private static Range[] createRanges(int length, int numFiles, Random random) {
    // generate a number of files with various lengths
    Range[] ranges = new Range[numFiles];
    for (int i = 0; i < numFiles; i++) {
      int start = i == 0 ? 0 : ranges[i-1].end;
      int end = i == numFiles - 1 ?
length : (length/numFiles)*(2*i + 1)/2 + random.nextInt(length/numFiles) + 1; ranges[i] = new Range(start, end); } return ranges; } private static void createFiles(int length, int numFiles, Random random) throws IOException { Range[] ranges = createRanges(length, numFiles, random); for (int i = 0; i < numFiles; i++) { Path file = new Path(workDir, "test_" + i + ".txt"); Writer writer = new OutputStreamWriter(localFs.create(file)); Range range = ranges[i]; try { for (int j = range.start; j < range.end; j++) { writer.write(Integer.toString(j)); writer.write("\n"); } } finally { writer.close(); } } } private static void writeFile(FileSystem fs, Path name, CompressionCodec codec, String contents) throws IOException { OutputStream stm; if (codec == null) { stm = fs.create(name); } else { stm = codec.createOutputStream(fs.create(name)); } stm.write(contents.getBytes()); stm.close(); } private static List<Text> readSplit(InputFormat<LongWritable,Text> format, InputSplit split, JobConf job) throws IOException { List<Text> result = new ArrayList<Text>(); RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter); LongWritable key = reader.createKey(); Text value = reader.createValue(); while (reader.next(key, value)) { result.add(value); value = reader.createValue(); } reader.close(); return result; } @BeforeClass public static void beforeClass() { MockDNSToSwitchMapping.initializeMockRackResolver(); } /** * Test using the gzip codec for reading */ @Test(timeout=10000) public void testGzip() throws IOException { JobConf job = new JobConf(defaultConf); CompressionCodec gzip = new GzipCodec(); ReflectionUtils.setConf(gzip, job); localFs.delete(workDir, true); writeFile(localFs, new Path(workDir, "part1.txt.gz"), gzip, "the quick\nbrown\nfox jumped\nover\n the lazy\n dog\n"); writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip, "is\ngzip\n"); writeFile(localFs, new Path(workDir, "part3.txt.gz"), gzip, "one\nmore\nsplit\n"); 
FileInputFormat.setInputPaths(job, workDir);

    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable , Text> format =
        new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setInputFormat(wrappedFormat);

    // TextInputFormat will produce 3 splits (one per .gz file); run the
    // grouped format with desired counts 1..3 and verify the combined
    // contents are complete and in-order within each original file.
    for (int j=1; j<=3; ++j) {
      format.setDesiredNumberOfSplits(j);
      InputSplit[] splits = format.getSplits(job, 100);
      if (j==1) {
        // j==1 covers single split corner case
        // and does not do grouping
        assertEquals("compressed splits == " + j, j, splits.length);
      }
      List<Text> results = new ArrayList<Text>();
      for (int i=0; i<splits.length; ++i) {
        List<Text> read = readSplit(format, splits[i], job);
        results.addAll(read);
      }
      assertEquals("splits length", 11, results.size());

      final String[] firstList =
        {"the quick", "brown", "fox jumped", "over", " the lazy", " dog"};
      final String[] secondList = {"is", "gzip"};
      final String[] thirdList = {"one", "more", "split"};
      String first = results.get(0).toString();
      int start = 0;
      // File ordering inside the group is not guaranteed, so dispatch on the
      // first token to work out which file came first.
      switch (first.charAt(0)) {
      case 't':
        start = testResults(results, firstList, start);
        break;
      case 'i':
        start = testResults(results, secondList, start);
        break;
      case 'o':
        start = testResults(results, thirdList, start);
        break;
      default:
        Assert.fail("unexpected first token - " + first);
      }
    }
  }

  // Asserts that results[start..start+first.length) equals first, and
  // returns the index just past the checked region.
  private static int testResults(List<Text> results, String[] first,
                                 int start) {
    for (int i = 0; i < first.length; i++) {
      assertEquals("splits["+i+"]", first[i], results.get(start+i).toString());
    }
    return first.length+start;
  }

  /**
   * Verifies min/max grouped-split-size clamping with a mocked wrapped
   * InputFormat that returns 100 identical 10MB splits.
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  @Test(timeout=10000)
  public void testGroupedSplitSize() throws IOException {
    JobConf job = new JobConf(defaultConf);
    InputFormat mockWrappedFormat = mock(InputFormat.class);
    TezGroupedSplitsInputFormat<LongWritable , Text> format =
        new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setInputFormat(mockWrappedFormat);

    job = (JobConf)
TezSplitGrouper.newConfigBuilder(job) .setGroupingSplitSize(50*1000*1000l, 500*1000*1000l) .build(); InputSplit mockSplit1 = mock(InputSplit.class); when(mockSplit1.getLength()).thenReturn(10*1000*1000l); when(mockSplit1.getLocations()).thenReturn(null); int numSplits = 100; InputSplit[] mockSplits = new InputSplit[numSplits]; for (int i=0; i<numSplits; i++) { mockSplits[i] = mockSplit1; } when(mockWrappedFormat.getSplits(any(), anyInt())).thenReturn(mockSplits); // desired splits not set. We end up choosing min/max split size based on // total data and num original splits. In this case, min size will be hit InputSplit[] splits = format.getSplits(job, 0); assertEquals(25, splits.length); // split too big. override with max format.setDesiredNumberOfSplits(1); splits = format.getSplits(job, 0); assertEquals(4, splits.length); // splits too small. override with min format.setDesiredNumberOfSplits(1000); splits = format.getSplits(job, 0); assertEquals(25, splits.length); } class TestInputSplit implements InputSplit { long length; String[] locations; int position; public TestInputSplit(long length, String[] locations, int position) { this.length = length; this.locations = locations; this.position = position; } @Override public void write(DataOutput out) throws IOException { } @Override public void readFields(DataInput in) throws IOException { } @Override public long getLength() throws IOException { return length; } @Override public String[] getLocations() throws IOException { return locations; } public int getPosition() { return position; } } @Test (timeout=5000) public void testMaintainSplitOrdering() throws IOException { int numLocations = 3; String[] locations = new String[numLocations]; InputSplit[] origSplits = new InputSplit[numLocations*4]; long splitLength = 100; for (int i=0; i<numLocations; i++) { locations[i] = "node" + i; String[] splitLoc = {locations[i]}; for (int j=0; j<4; j++) { int pos = i*4 + j; origSplits[pos] = new TestInputSplit(splitLength, 
splitLoc, pos); } } TezMapredSplitsGrouper grouper = new TezMapredSplitsGrouper(); JobConf conf = new JobConf(defaultConf); conf = (JobConf) TezSplitGrouper.newConfigBuilder(conf) .setGroupingSplitSize(splitLength*3, splitLength*3) .setGroupingRackSplitSizeReduction(1) .build(); // based on the above settings the 3 nodes will each group 3 splits. // the remainig 3 splits (1 from each node) will be grouped at rack level (default-rack) // all of them will maintain ordering InputSplit[] groupedSplits = grouper.getGroupedSplits(conf, origSplits, 4, "InputFormat"); assertEquals(4, groupedSplits.length); for (int i=0; i<4; ++i) { TezGroupedSplit split = (TezGroupedSplit)groupedSplits[i]; List<InputSplit> innerSplits = split.getGroupedSplits(); int pos = -1; // splits in group maintain original order for (InputSplit innerSplit : innerSplits) { int splitPos = ((TestInputSplit) innerSplit).getPosition(); assertTrue(pos < splitPos); pos = splitPos; } // last one is rack split if (i==3) { assertTrue(split.getRack() != null); } } } @Test (timeout=5000) public void testRepeatableSplits() throws IOException { int numLocations = 3; String[] locations = new String[numLocations]; InputSplit[] origSplits = new InputSplit[numLocations*4]; long splitLength = 100; for (int i=0; i<numLocations; i++) { locations[i] = "node" + i; } for (int i=0; i<4; i++) { String[] splitLoc = null; for (int j=0; j<3; j++) { int pos = i*3 + j; if (pos < 9) { // for the first 9 splits do node grouping // copy of the string to verify the comparator does not succeed by comparing the same object // provide 2 locations for each split to provide alternates for non-repeatability String[] nodeLoc = {new String(locations[i]), new String(locations[(i+1)%numLocations])}; splitLoc = nodeLoc; } else { // for the last 3 splits do rack grouping by spreading them across the 3 nodes String[] rackLoc = {new String(locations[j])}; splitLoc = rackLoc; } origSplits[pos] = new TestInputSplit(splitLength, splitLoc, pos); } } 
TezMapredSplitsGrouper grouper = new TezMapredSplitsGrouper(); JobConf conf = new JobConf(defaultConf); conf = (JobConf) TezSplitGrouper.newConfigBuilder(conf) .setGroupingSplitSize(splitLength*3, splitLength*3) .setGroupingRackSplitSizeReduction(1) .build(); // based on the above settings the 3 nodes will each group 3 splits. // the remainig 3 splits (1 from each node) will be grouped at rack level (default-rack) // all of them will maintain ordering InputSplit[] groupedSplits1 = grouper.getGroupedSplits(conf, origSplits, 4, "InputFormat"); InputSplit[] groupedSplits2 = grouper.getGroupedSplits(conf, origSplits, 4, "InputFormat"); // KKK Start looking here. assertEquals(4, groupedSplits1.length); assertEquals(4, groupedSplits2.length); // check both split groups are the same. this depends on maintaining split order tested above for (int i=0; i<4; ++i) { TezGroupedSplit gSplit1 = ((TezGroupedSplit) groupedSplits1[i]); List<InputSplit> testSplits1 = gSplit1.getGroupedSplits(); TezGroupedSplit gSplit2 = ((TezGroupedSplit) groupedSplits2[i]); List<InputSplit> testSplits2 = gSplit2.getGroupedSplits(); assertEquals(testSplits1.size(), testSplits2.size()); for (int j=0; j<testSplits1.size(); j++) { TestInputSplit split1 = (TestInputSplit) testSplits1.get(j); TestInputSplit split2 = (TestInputSplit) testSplits2.get(j); assertEquals(split1.position, split2.position); } if (i==3) { // check for rack split creation. 
Ensures repeatability holds for rack splits also assertTrue(gSplit1.getRack() != null); assertTrue(gSplit2.getRack() != null); } } } @Test (timeout = 30000) public void testS3Scenario() throws IOException { //There can be multiple nodes in cluster, but locations would be "localhost" in s3 String[] locations = {"localhost"}; int oriSplits = 52; int desiredSplits = 19; long splitLength = 231958; InputSplit[] origSplits = new InputSplit[oriSplits]; for (int i = 0; i < oriSplits; i++) { String[] splitLoc = locations; origSplits[i] = new TestInputSplit(splitLength, splitLoc, i); } TezMapredSplitsGrouper grouper = new TezMapredSplitsGrouper(); JobConf conf = new JobConf(defaultConf); conf = (JobConf) TezSplitGrouper.newConfigBuilder(conf).build(); //Create splits now InputSplit[] groupedSplits = grouper.getGroupedSplits(conf, origSplits, desiredSplits, "SampleFormat"); //Verify int splitsInGroup = oriSplits / desiredSplits; int totalSplits = (int) Math.ceil(oriSplits * 1.0 / splitsInGroup); assertEquals(totalSplits, groupedSplits.length); // min split optimization should not be invoked if any location is not localhost String[] nonLocalLocations = { "EmptyLocation", "localhost" }; origSplits = new InputSplit[oriSplits]; for (int i = 0; i < oriSplits; i++) { String[] splitLoc = nonLocalLocations; origSplits[i] = new TestInputSplit(splitLength, splitLoc, i); } grouper = new TezMapredSplitsGrouper(); conf = new JobConf(defaultConf); conf = (JobConf) TezSplitGrouper.newConfigBuilder(conf).build(); //Create splits now groupedSplits = grouper.getGroupedSplits(conf, origSplits, desiredSplits, "SampleFormat"); //splits should be 1 assertEquals(1, groupedSplits.length); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Test(timeout=10000) public void testGroupedSplitWithDuplicates() throws IOException { JobConf job = new JobConf(defaultConf); InputFormat mockWrappedFormat = mock(InputFormat.class); TezGroupedSplitsInputFormat<LongWritable , Text> format = new 
TezGroupedSplitsInputFormat<LongWritable, Text>(); format.setConf(job); format.setInputFormat(mockWrappedFormat); // put multiple splits with multiple copies in the same location String[] locations = {"common", "common", "common"}; int numSplits = 3; InputSplit[] mockSplits = new InputSplit[numSplits]; for (int i=0; i<numSplits; i++) { InputSplit mockSplit = mock(InputSplit.class); when(mockSplit.getLength()).thenReturn(10*1000*1000l); when(mockSplit.getLocations()).thenReturn(locations); mockSplits[i] = mockSplit; } when(mockWrappedFormat.getSplits(any(), anyInt())).thenReturn(mockSplits); format.setDesiredNumberOfSplits(1); InputSplit[] splits = format.getSplits(job, 1); assertEquals(1, splits.length); TezGroupedSplit split = (TezGroupedSplit) splits[0]; // all 3 splits are present assertEquals(numSplits, split.wrappedSplits.size()); Set<InputSplit> splitSet = Sets.newHashSet(split.wrappedSplits); assertEquals(numSplits, splitSet.size()); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Test(timeout=10000) public void testGroupedSplitWithBadLocations() throws IOException { JobConf job = new JobConf(defaultConf); InputFormat mockWrappedFormat = mock(InputFormat.class); TezGroupedSplitsInputFormat<LongWritable , Text> format = new TezGroupedSplitsInputFormat<LongWritable, Text>(); format.setConf(job); format.setInputFormat(mockWrappedFormat); // put multiple splits with multiple copies in the same location int numSplits = 3; InputSplit[] mockSplits = new InputSplit[numSplits]; InputSplit mockSplit1 = mock(InputSplit.class); when(mockSplit1.getLength()).thenReturn(10*1000*1000l); when(mockSplit1.getLocations()).thenReturn(null); mockSplits[0] = mockSplit1; InputSplit mockSplit2 = mock(InputSplit.class); when(mockSplit2.getLength()).thenReturn(10*1000*1000l); when(mockSplit2.getLocations()).thenReturn(new String[] {null}); mockSplits[1] = mockSplit2; InputSplit mockSplit3 = mock(InputSplit.class); when(mockSplit3.getLength()).thenReturn(10*1000*1000l); 
when(mockSplit3.getLocations()).thenReturn(new String[] {null, null}); mockSplits[2] = mockSplit3; when(mockWrappedFormat.getSplits(any(), anyInt())).thenReturn(mockSplits); format.setDesiredNumberOfSplits(1); InputSplit[] splits = format.getSplits(job, 1); assertEquals(1, splits.length); TezGroupedSplit split = (TezGroupedSplit) splits[0]; // all 3 splits are present assertEquals(numSplits, split.wrappedSplits.size()); ByteArrayOutputStream bOut = new ByteArrayOutputStream(); split.write(new DataOutputStream(bOut)); } @SuppressWarnings({ "rawtypes", "unchecked" }) // No grouping @Test(timeout=10000) public void testGroupedSplitWithBadLocations2() throws IOException { JobConf job = new JobConf(defaultConf); InputFormat mockWrappedFormat = mock(InputFormat.class); TezGroupedSplitsInputFormat<LongWritable , Text> format = new TezGroupedSplitsInputFormat<LongWritable, Text>(); format.setConf(job); format.setInputFormat(mockWrappedFormat); // put multiple splits with multiple copies in the same location String validLocation = "validLocation"; String validLocation2 = "validLocation2"; int numSplits = 5; InputSplit[] mockSplits = new InputSplit[numSplits]; InputSplit mockSplit1 = mock(InputSplit.class); when(mockSplit1.getLength()).thenReturn(100*1000*1000l); when(mockSplit1.getLocations()).thenReturn(null); mockSplits[0] = mockSplit1; InputSplit mockSplit2 = mock(InputSplit.class); when(mockSplit2.getLength()).thenReturn(100*1000*1000l); when(mockSplit2.getLocations()).thenReturn(new String[] {null}); mockSplits[1] = mockSplit2; InputSplit mockSplit3 = mock(InputSplit.class); when(mockSplit3.getLength()).thenReturn(100*1000*1000l); when(mockSplit3.getLocations()).thenReturn(new String[] {null, null}); mockSplits[2] = mockSplit3; InputSplit mockSplit4 = mock(InputSplit.class); when(mockSplit4.getLength()).thenReturn(100*1000*1000l); when(mockSplit4.getLocations()).thenReturn(new String[] {validLocation}); mockSplits[3] = mockSplit4; InputSplit mockSplit5 = 
mock(InputSplit.class); when(mockSplit5.getLength()).thenReturn(100*1000*1000l); when(mockSplit5.getLocations()).thenReturn(new String[] {validLocation, null, validLocation2}); mockSplits[4] = mockSplit4; when(mockWrappedFormat.getSplits(any(), anyInt())).thenReturn(mockSplits); format.setDesiredNumberOfSplits(numSplits); InputSplit[] splits = format.getSplits(job, 1); assertEquals(numSplits, splits.length); for (int i = 0 ; i < numSplits ; i++) { TezGroupedSplit split = (TezGroupedSplit) splits[i]; // all 3 splits are present assertEquals(1, split.wrappedSplits.size()); if (i==3) { assertEquals(1, split.getLocations().length); assertEquals(validLocation, split.getLocations()[0]); } else if (i==4) { assertEquals(1, split.getLocations().length); assertTrue(split.getLocations()[0].equals(validLocation) || split.getLocations()[0].equals(validLocation2)); } else { Assert.assertNull(split.getLocations()); } ByteArrayOutputStream bOut = new ByteArrayOutputStream(); split.write(new DataOutputStream(bOut)); } } @SuppressWarnings({ "rawtypes", "unchecked" }) @Test(timeout=10000) public void testGroupedSplitWithEstimator() throws IOException { JobConf job = new JobConf(defaultConf); job = (JobConf) TezSplitGrouper.newConfigBuilder(job) .setGroupingSplitSize(12*1000*1000l, 25*1000*1000l) .build(); InputFormat mockWrappedFormat = mock(InputFormat.class); TezGroupedSplitsInputFormat<LongWritable , Text> format = new TezGroupedSplitsInputFormat<LongWritable, Text>(); format.setConf(job); format.setInputFormat(mockWrappedFormat); final InputSplit mockSplit1 = mock(InputSplit.class); final InputSplit mockSplit2 = mock(InputSplit.class); final InputSplit mockSplit3 = mock(InputSplit.class); final String[] locations = new String[] { "common", "common", "common" }; final SplitSizeEstimator estimator = new SplitSizeEstimator() { @Override public long getEstimatedSize(InputSplit split) throws IOException { LOG.info("Estimating 10x of " + split.getLength()); // 10x compression return 10 
* split.getLength(); } }; when(mockSplit1.getLength()).thenReturn(1000 * 1000l); when(mockSplit1.getLocations()).thenReturn(locations); when(mockSplit2.getLength()).thenReturn(1000 * 1000l); when(mockSplit2.getLocations()).thenReturn(locations); when(mockSplit3.getLength()).thenReturn(2 * 1000 * 1000l + 1); when(mockSplit3.getLocations()).thenReturn(locations); // put multiple splits which should be grouped (1,1,2) Mb, but estimated to be 10x // 10,10,20Mb - grouped with min=12Mb, max=25Mb // should be grouped as (1,1),(2) InputSplit[] mockSplits = new InputSplit[] { mockSplit1, mockSplit2, mockSplit3 }; when(mockWrappedFormat.getSplits(any(), anyInt())) .thenReturn(mockSplits); format.setDesiredNumberOfSplits(1); format.setSplitSizeEstimator(estimator); InputSplit[] splits = format.getSplits(job, 1); // due to the min = 12Mb assertEquals(2, splits.length); for (InputSplit group : splits) { TezGroupedSplit split = (TezGroupedSplit) group; if (split.wrappedSplits.size() == 2) { // split1+split2 assertEquals(split.getLength(), 2 * 1000 * 1000l); } else { // split3 assertEquals(split.getLength(), 2 * 1000 * 1000l + 1); } } } // Splits get grouped @Test (timeout = 10000) public void testGroupingWithCustomLocations1() throws IOException { int numSplits = 3; InputSplit[] mockSplits = new InputSplit[numSplits]; InputSplit mockSplit1 = mock(InputSplit.class); when(mockSplit1.getLength()).thenReturn(100*1000*1000l); when(mockSplit1.getLocations()).thenReturn(new String[] {"location1", "location2"}); mockSplits[0] = mockSplit1; InputSplit mockSplit2 = mock(InputSplit.class); when(mockSplit2.getLength()).thenReturn(100*1000*1000l); when(mockSplit2.getLocations()).thenReturn(new String[] {"location3", "location4"}); mockSplits[1] = mockSplit2; InputSplit mockSplit3 = mock(InputSplit.class); when(mockSplit3.getLength()).thenReturn(100*1000*1000l); when(mockSplit3.getLocations()).thenReturn(new String[] {"location5", "location6"}); mockSplits[2] = mockSplit3; 
SplitLocationProvider locationProvider = new SplitLocationProvider() { @Override public String[] getLocations(InputSplit split) throws IOException { return new String[] {"customLocation"}; } }; TezMapredSplitsGrouper splitsGrouper = new TezMapredSplitsGrouper(); InputSplit[] groupedSplits = splitsGrouper.getGroupedSplits(new Configuration(defaultConf), mockSplits, 1, "MockInputForamt", null, locationProvider); // Sanity. 1 group, with 3 splits. assertEquals(1, groupedSplits.length); assertTrue(groupedSplits[0] instanceof TezGroupedSplit); TezGroupedSplit groupedSplit = (TezGroupedSplit)groupedSplits[0]; assertEquals(3, groupedSplit.getGroupedSplits().size()); // Verify that the split ends up being grouped to the custom location. assertEquals(1, groupedSplit.getLocations().length); assertEquals("customLocation", groupedSplit.getLocations()[0]); } // Original splits returned. @Test (timeout = 10000) public void testGroupingWithCustomLocations2() throws IOException { int numSplits = 3; InputSplit[] mockSplits = new InputSplit[numSplits]; InputSplit mockSplit1 = mock(InputSplit.class); when(mockSplit1.getLength()).thenReturn(100*1000*1000l); when(mockSplit1.getLocations()).thenReturn(new String[] {"location1", "location2"}); mockSplits[0] = mockSplit1; InputSplit mockSplit2 = mock(InputSplit.class); when(mockSplit2.getLength()).thenReturn(100*1000*1000l); when(mockSplit2.getLocations()).thenReturn(new String[] {"location3", "location4"}); mockSplits[1] = mockSplit2; InputSplit mockSplit3 = mock(InputSplit.class); when(mockSplit3.getLength()).thenReturn(100*1000*1000l); when(mockSplit3.getLocations()).thenReturn(new String[] {"location5", "location6"}); mockSplits[2] = mockSplit3; SplitLocationProvider locationProvider = new SplitLocationProvider() { @Override public String[] getLocations(InputSplit split) throws IOException { return new String[] {"customLocation"}; } }; TezMapredSplitsGrouper splitsGrouper = new TezMapredSplitsGrouper(); InputSplit[] groupedSplits = 
splitsGrouper.getGroupedSplits(new Configuration(defaultConf), mockSplits, 3, "MockInputForamt", null, locationProvider); // Sanity. 3 group, with 1 split each assertEquals(3, groupedSplits.length); for (int i = 0 ; i < 3 ; i++) { assertTrue(groupedSplits[i] instanceof TezGroupedSplit); TezGroupedSplit groupedSplit = (TezGroupedSplit)groupedSplits[i]; assertEquals(1, groupedSplit.getGroupedSplits().size()); // Verify the splits have their final location set to customLocation assertEquals(1, groupedSplit.getLocations().length); assertEquals("customLocation", groupedSplit.getLocations()[0]); } } @Test(timeout = 5000) public void testForceNodeLocalSplits() throws IOException { int numLocations = 7; long splitLen = 100L; String[] locations = new String[numLocations]; for (int i = 0; i < numLocations; i++) { locations[i] = "node" + i; } // Generate 24 splits (6 per node) spread evenly across node0-node3. // Generate 1 split each on the remaining 3 nodes (4-6) int numSplits = 27; InputSplit[] rawSplits = new InputSplit[numSplits]; for (int i = 0; i < 27; i++) { String splitLoc[] = new String[1]; if (i < 24) { splitLoc[0] = locations[i % 4]; } else { splitLoc[0] = locations[4 + i % 24]; } rawSplits[i] = new TestInputSplit(splitLen, splitLoc, i); } TezMapredSplitsGrouper grouper = new TezMapredSplitsGrouper(); JobConf confDisallowSmallEarly = new JobConf(defaultConf); confDisallowSmallEarly = (JobConf) TezSplitGrouper.newConfigBuilder(confDisallowSmallEarly) .setGroupingSplitSize(splitLen * 3, splitLen * 3) .setGroupingRackSplitSizeReduction(1) .setNodeLocalGroupsOnly(false) .build(); JobConf confSmallEarly = new JobConf(defaultConf); confSmallEarly = (JobConf) TezSplitGrouper.newConfigBuilder(confSmallEarly) .setGroupingSplitSize(splitLen * 3, splitLen * 3) .setGroupingRackSplitSizeReduction(1) .setNodeLocalGroupsOnly(true) .build(); // Without early grouping -> 4 * 2 node local, 1 merged - 9 total // With early grouping -> 4 * 2 node local (first 4 nodes), 3 smaller node 
local (4-6) -> 11 total // Requesting 9 based purely on size. InputSplit[] groupedSplitsDisallowSmallEarly = grouper.getGroupedSplits(confDisallowSmallEarly, rawSplits, 9, "InputFormat"); assertEquals(9, groupedSplitsDisallowSmallEarly.length); // Verify the actual splits as well. Map<String, MutableInt> matchedLocations = new HashMap<>(); verifySplitsFortestAllowSmallSplitsEarly(groupedSplitsDisallowSmallEarly); TezGroupedSplit group = (TezGroupedSplit) groupedSplitsDisallowSmallEarly[8]; assertEquals(3, group.getLocations().length); assertEquals(3, group.getGroupedSplits().size()); Set<String> exp = Sets.newHashSet(locations[4], locations[5], locations[6]); for (int i = 0; i < 3; i++) { LOG.info(group.getLocations()[i]); exp.remove(group.getLocations()[i]); } assertEquals(0, exp.size()); InputSplit[] groupedSplitsSmallEarly = grouper.getGroupedSplits(confSmallEarly, rawSplits, 9, "InputFormat"); assertEquals(11, groupedSplitsSmallEarly.length); // The first 8 are the larger groups. verifySplitsFortestAllowSmallSplitsEarly(groupedSplitsSmallEarly); exp = Sets.newHashSet(locations[4], locations[5], locations[6]); for (int i = 8; i < 11; i++) { group = (TezGroupedSplit) groupedSplitsSmallEarly[i]; assertEquals(1, group.getLocations().length); assertEquals(1, group.getGroupedSplits().size()); String matchedLoc = group.getLocations()[0]; assertTrue(exp.contains(matchedLoc)); exp.remove(matchedLoc); } assertEquals(0, exp.size()); } private void verifySplitsFortestAllowSmallSplitsEarly(InputSplit[] groupedSplits) throws IOException { Map<String, MutableInt> matchedLocations = new HashMap<>(); for (int i = 0; i < 8; i++) { TezGroupedSplit group = (TezGroupedSplit) groupedSplits[i]; assertEquals(1, group.getLocations().length); assertEquals(3, group.getGroupedSplits().size()); String matchedLoc = group.getLocations()[0]; MutableInt count = matchedLocations.get(matchedLoc); if (count == null) { count = new MutableInt(0); matchedLocations.put(matchedLoc, count); } 
count.increment(); } for (Map.Entry<String, MutableInt> entry : matchedLocations.entrySet()) { String loc = entry.getKey(); int nodeId = Character.getNumericValue(loc.charAt(loc.length() - 1)); assertTrue(nodeId < 4); assertTrue(loc.startsWith("node") && loc.length() == 5); assertEquals(2, entry.getValue().getValue()); } } }
openjdk/jdk8
35,386
jdk/src/share/classes/java/lang/reflect/Proxy.java
/* * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.lang.reflect; import java.lang.ref.WeakReference; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; import java.util.IdentityHashMap; import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; import sun.misc.ProxyGenerator; import sun.misc.VM; import sun.reflect.CallerSensitive; import sun.reflect.Reflection; import sun.reflect.misc.ReflectUtil; import sun.security.util.SecurityConstants; /** * {@code Proxy} provides static methods for creating dynamic proxy * classes and instances, and it is also the superclass of all * dynamic proxy classes created by those methods. 
* * <p>To create a proxy for some interface {@code Foo}: * <pre> * InvocationHandler handler = new MyInvocationHandler(...); * Class&lt;?&gt; proxyClass = Proxy.getProxyClass(Foo.class.getClassLoader(), Foo.class); * Foo f = (Foo) proxyClass.getConstructor(InvocationHandler.class). * newInstance(handler); * </pre> * or more simply: * <pre> * Foo f = (Foo) Proxy.newProxyInstance(Foo.class.getClassLoader(), * new Class&lt;?&gt;[] { Foo.class }, * handler); * </pre> * * <p>A <i>dynamic proxy class</i> (simply referred to as a <i>proxy * class</i> below) is a class that implements a list of interfaces * specified at runtime when the class is created, with behavior as * described below. * * A <i>proxy interface</i> is such an interface that is implemented * by a proxy class. * * A <i>proxy instance</i> is an instance of a proxy class. * * Each proxy instance has an associated <i>invocation handler</i> * object, which implements the interface {@link InvocationHandler}. * A method invocation on a proxy instance through one of its proxy * interfaces will be dispatched to the {@link InvocationHandler#invoke * invoke} method of the instance's invocation handler, passing the proxy * instance, a {@code java.lang.reflect.Method} object identifying * the method that was invoked, and an array of type {@code Object} * containing the arguments. The invocation handler processes the * encoded method invocation as appropriate and the result that it * returns will be returned as the result of the method invocation on * the proxy instance. * * <p>A proxy class has the following properties: * * <ul> * <li>Proxy classes are <em>public, final, and not abstract</em> if * all proxy interfaces are public.</li> * * <li>Proxy classes are <em>non-public, final, and not abstract</em> if * any of the proxy interfaces is non-public.</li> * * <li>The unqualified name of a proxy class is unspecified. 
The space * of class names that begin with the string {@code "$Proxy"} * should be, however, reserved for proxy classes. * * <li>A proxy class extends {@code java.lang.reflect.Proxy}. * * <li>A proxy class implements exactly the interfaces specified at its * creation, in the same order. * * <li>If a proxy class implements a non-public interface, then it will * be defined in the same package as that interface. Otherwise, the * package of a proxy class is also unspecified. Note that package * sealing will not prevent a proxy class from being successfully defined * in a particular package at runtime, and neither will classes already * defined by the same class loader and the same package with particular * signers. * * <li>Since a proxy class implements all of the interfaces specified at * its creation, invoking {@code getInterfaces} on its * {@code Class} object will return an array containing the same * list of interfaces (in the order specified at its creation), invoking * {@code getMethods} on its {@code Class} object will return * an array of {@code Method} objects that include all of the * methods in those interfaces, and invoking {@code getMethod} will * find methods in the proxy interfaces as would be expected. * * <li>The {@link Proxy#isProxyClass Proxy.isProxyClass} method will * return true if it is passed a proxy class-- a class returned by * {@code Proxy.getProxyClass} or the class of an object returned by * {@code Proxy.newProxyInstance}-- and false otherwise. * * <li>The {@code java.security.ProtectionDomain} of a proxy class * is the same as that of system classes loaded by the bootstrap class * loader, such as {@code java.lang.Object}, because the code for a * proxy class is generated by trusted system code. This protection * domain will typically be granted * {@code java.security.AllPermission}. 
* * <li>Each proxy class has one public constructor that takes one argument, * an implementation of the interface {@link InvocationHandler}, to set * the invocation handler for a proxy instance. Rather than having to use * the reflection API to access the public constructor, a proxy instance * can be also be created by calling the {@link Proxy#newProxyInstance * Proxy.newProxyInstance} method, which combines the actions of calling * {@link Proxy#getProxyClass Proxy.getProxyClass} with invoking the * constructor with an invocation handler. * </ul> * * <p>A proxy instance has the following properties: * * <ul> * <li>Given a proxy instance {@code proxy} and one of the * interfaces implemented by its proxy class {@code Foo}, the * following expression will return true: * <pre> * {@code proxy instanceof Foo} * </pre> * and the following cast operation will succeed (rather than throwing * a {@code ClassCastException}): * <pre> * {@code (Foo) proxy} * </pre> * * <li>Each proxy instance has an associated invocation handler, the one * that was passed to its constructor. The static * {@link Proxy#getInvocationHandler Proxy.getInvocationHandler} method * will return the invocation handler associated with the proxy instance * passed as its argument. * * <li>An interface method invocation on a proxy instance will be * encoded and dispatched to the invocation handler's {@link * InvocationHandler#invoke invoke} method as described in the * documentation for that method. * * <li>An invocation of the {@code hashCode}, * {@code equals}, or {@code toString} methods declared in * {@code java.lang.Object} on a proxy instance will be encoded and * dispatched to the invocation handler's {@code invoke} method in * the same manner as interface method invocations are encoded and * dispatched, as described above. The declaring class of the * {@code Method} object passed to {@code invoke} will be * {@code java.lang.Object}. 
Other public methods of a proxy * instance inherited from {@code java.lang.Object} are not * overridden by a proxy class, so invocations of those methods behave * like they do for instances of {@code java.lang.Object}. * </ul> * * <h3>Methods Duplicated in Multiple Proxy Interfaces</h3> * * <p>When two or more interfaces of a proxy class contain a method with * the same name and parameter signature, the order of the proxy class's * interfaces becomes significant. When such a <i>duplicate method</i> * is invoked on a proxy instance, the {@code Method} object passed * to the invocation handler will not necessarily be the one whose * declaring class is assignable from the reference type of the interface * that the proxy's method was invoked through. This limitation exists * because the corresponding method implementation in the generated proxy * class cannot determine which interface it was invoked through. * Therefore, when a duplicate method is invoked on a proxy instance, * the {@code Method} object for the method in the foremost interface * that contains the method (either directly or inherited through a * superinterface) in the proxy class's list of interfaces is passed to * the invocation handler's {@code invoke} method, regardless of the * reference type through which the method invocation occurred. * * <p>If a proxy interface contains a method with the same name and * parameter signature as the {@code hashCode}, {@code equals}, * or {@code toString} methods of {@code java.lang.Object}, * when such a method is invoked on a proxy instance, the * {@code Method} object passed to the invocation handler will have * {@code java.lang.Object} as its declaring class. In other words, * the public, non-final methods of {@code java.lang.Object} * logically precede all of the proxy interfaces for the determination of * which {@code Method} object to pass to the invocation handler. 
* * <p>Note also that when a duplicate method is dispatched to an * invocation handler, the {@code invoke} method may only throw * checked exception types that are assignable to one of the exception * types in the {@code throws} clause of the method in <i>all</i> of * the proxy interfaces that it can be invoked through. If the * {@code invoke} method throws a checked exception that is not * assignable to any of the exception types declared by the method in one * of the proxy interfaces that it can be invoked through, then an * unchecked {@code UndeclaredThrowableException} will be thrown by * the invocation on the proxy instance. This restriction means that not * all of the exception types returned by invoking * {@code getExceptionTypes} on the {@code Method} object * passed to the {@code invoke} method can necessarily be thrown * successfully by the {@code invoke} method. * * @author Peter Jones * @see InvocationHandler * @since 1.3 */ public class Proxy implements java.io.Serializable { private static final long serialVersionUID = -2222568056686623797L; /** parameter types of a proxy class constructor */ private static final Class<?>[] constructorParams = { InvocationHandler.class }; /** * a cache of proxy classes */ private static final WeakCache<ClassLoader, Class<?>[], Class<?>> proxyClassCache = new WeakCache<>(new KeyFactory(), new ProxyClassFactory()); /** * the invocation handler for this proxy instance. * @serial */ protected InvocationHandler h; /** * Prohibits instantiation. */ private Proxy() { } /** * Constructs a new {@code Proxy} instance from a subclass * (typically, a dynamic proxy class) with the specified value * for its invocation handler. * * @param h the invocation handler for this proxy instance * * @throws NullPointerException if the given invocation handler, {@code h}, * is {@code null}. 
*/ protected Proxy(InvocationHandler h) { Objects.requireNonNull(h); this.h = h; } /** * Returns the {@code java.lang.Class} object for a proxy class * given a class loader and an array of interfaces. The proxy class * will be defined by the specified class loader and will implement * all of the supplied interfaces. If any of the given interfaces * is non-public, the proxy class will be non-public. If a proxy class * for the same permutation of interfaces has already been defined by the * class loader, then the existing proxy class will be returned; otherwise, * a proxy class for those interfaces will be generated dynamically * and defined by the class loader. * * <p>There are several restrictions on the parameters that may be * passed to {@code Proxy.getProxyClass}: * * <ul> * <li>All of the {@code Class} objects in the * {@code interfaces} array must represent interfaces, not * classes or primitive types. * * <li>No two elements in the {@code interfaces} array may * refer to identical {@code Class} objects. * * <li>All of the interface types must be visible by name through the * specified class loader. In other words, for class loader * {@code cl} and every interface {@code i}, the following * expression must be true: * <pre> * Class.forName(i.getName(), false, cl) == i * </pre> * * <li>All non-public interfaces must be in the same package; * otherwise, it would not be possible for the proxy class to * implement all of the interfaces, regardless of what package it is * defined in. * * <li>For any set of member methods of the specified interfaces * that have the same signature: * <ul> * <li>If the return type of any of the methods is a primitive * type or void, then all of the methods must have that same * return type. * <li>Otherwise, one of the methods must have a return type that * is assignable to all of the return types of the rest of the * methods. 
* </ul> * * <li>The resulting proxy class must not exceed any limits imposed * on classes by the virtual machine. For example, the VM may limit * the number of interfaces that a class may implement to 65535; in * that case, the size of the {@code interfaces} array must not * exceed 65535. * </ul> * * <p>If any of these restrictions are violated, * {@code Proxy.getProxyClass} will throw an * {@code IllegalArgumentException}. If the {@code interfaces} * array argument or any of its elements are {@code null}, a * {@code NullPointerException} will be thrown. * * <p>Note that the order of the specified proxy interfaces is * significant: two requests for a proxy class with the same combination * of interfaces but in a different order will result in two distinct * proxy classes. * * @param loader the class loader to define the proxy class * @param interfaces the list of interfaces for the proxy class * to implement * @return a proxy class that is defined in the specified class loader * and that implements the specified interfaces * @throws IllegalArgumentException if any of the restrictions on the * parameters that may be passed to {@code getProxyClass} * are violated * @throws SecurityException if a security manager, <em>s</em>, is present * and any of the following conditions is met: * <ul> * <li> the given {@code loader} is {@code null} and * the caller's class loader is not {@code null} and the * invocation of {@link SecurityManager#checkPermission * s.checkPermission} with * {@code RuntimePermission("getClassLoader")} permission * denies access.</li> * <li> for each proxy interface, {@code intf}, * the caller's class loader is not the same as or an * ancestor of the class loader for {@code intf} and * invocation of {@link SecurityManager#checkPackageAccess * s.checkPackageAccess()} denies access to {@code intf}.</li> * </ul> * @throws NullPointerException if the {@code interfaces} array * argument or any of its elements are {@code null} */ @CallerSensitive public 
static Class<?> getProxyClass(ClassLoader loader, Class<?>... interfaces) throws IllegalArgumentException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { checkProxyAccess(Reflection.getCallerClass(), loader, interfaces); } return getProxyClass0(loader, interfaces); } /* * Check permissions required to create a Proxy class. * * To define a proxy class, it performs the access checks as in * Class.forName (VM will invoke ClassLoader.checkPackageAccess): * 1. "getClassLoader" permission check if loader == null * 2. checkPackageAccess on the interfaces it implements * * To get a constructor and new instance of a proxy class, it performs * the package access check on the interfaces it implements * as in Class.getConstructor. * * If an interface is non-public, the proxy class must be defined by * the defining loader of the interface. If the caller's class loader * is not the same as the defining loader of the interface, the VM * will throw IllegalAccessError when the generated proxy class is * being defined via the defineClass0 method. */ private static void checkProxyAccess(Class<?> caller, ClassLoader loader, Class<?>... interfaces) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { ClassLoader ccl = caller.getClassLoader(); if (VM.isSystemDomainLoader(loader) && !VM.isSystemDomainLoader(ccl)) { sm.checkPermission(SecurityConstants.GET_CLASSLOADER_PERMISSION); } ReflectUtil.checkProxyPackageAccess(ccl, interfaces); } } /** * Generate a proxy class. Must call the checkProxyAccess method * to perform permission checks before calling this. */ private static Class<?> getProxyClass0(ClassLoader loader, Class<?>... 
interfaces) { if (interfaces.length > 65535) { throw new IllegalArgumentException("interface limit exceeded"); } // If the proxy class defined by the given loader implementing // the given interfaces exists, this will simply return the cached copy; // otherwise, it will create the proxy class via the ProxyClassFactory return proxyClassCache.get(loader, interfaces); } /* * a key used for proxy class with 0 implemented interfaces */ private static final Object key0 = new Object(); /* * Key1 and Key2 are optimized for the common use of dynamic proxies * that implement 1 or 2 interfaces. */ /* * a key used for proxy class with 1 implemented interface */ private static final class Key1 extends WeakReference<Class<?>> { private final int hash; Key1(Class<?> intf) { super(intf); this.hash = intf.hashCode(); } @Override public int hashCode() { return hash; } @Override public boolean equals(Object obj) { Class<?> intf; return this == obj || obj != null && obj.getClass() == Key1.class && (intf = get()) != null && intf == ((Key1) obj).get(); } } /* * a key used for proxy class with 2 implemented interfaces */ private static final class Key2 extends WeakReference<Class<?>> { private final int hash; private final WeakReference<Class<?>> ref2; Key2(Class<?> intf1, Class<?> intf2) { super(intf1); hash = 31 * intf1.hashCode() + intf2.hashCode(); ref2 = new WeakReference<Class<?>>(intf2); } @Override public int hashCode() { return hash; } @Override public boolean equals(Object obj) { Class<?> intf1, intf2; return this == obj || obj != null && obj.getClass() == Key2.class && (intf1 = get()) != null && intf1 == ((Key2) obj).get() && (intf2 = ref2.get()) != null && intf2 == ((Key2) obj).ref2.get(); } } /* * a key used for proxy class with any number of implemented interfaces * (used here for 3 or more only) */ private static final class KeyX { private final int hash; private final WeakReference<Class<?>>[] refs; @SuppressWarnings("unchecked") KeyX(Class<?>[] interfaces) { hash = 
Arrays.hashCode(interfaces); refs = (WeakReference<Class<?>>[])new WeakReference<?>[interfaces.length]; for (int i = 0; i < interfaces.length; i++) { refs[i] = new WeakReference<>(interfaces[i]); } } @Override public int hashCode() { return hash; } @Override public boolean equals(Object obj) { return this == obj || obj != null && obj.getClass() == KeyX.class && equals(refs, ((KeyX) obj).refs); } private static boolean equals(WeakReference<Class<?>>[] refs1, WeakReference<Class<?>>[] refs2) { if (refs1.length != refs2.length) { return false; } for (int i = 0; i < refs1.length; i++) { Class<?> intf = refs1[i].get(); if (intf == null || intf != refs2[i].get()) { return false; } } return true; } } /** * A function that maps an array of interfaces to an optimal key where * Class objects representing interfaces are weakly referenced. */ private static final class KeyFactory implements BiFunction<ClassLoader, Class<?>[], Object> { @Override public Object apply(ClassLoader classLoader, Class<?>[] interfaces) { switch (interfaces.length) { case 1: return new Key1(interfaces[0]); // the most frequent case 2: return new Key2(interfaces[0], interfaces[1]); case 0: return key0; default: return new KeyX(interfaces); } } } /** * A factory function that generates, defines and returns the proxy class given * the ClassLoader and array of interfaces. */ private static final class ProxyClassFactory implements BiFunction<ClassLoader, Class<?>[], Class<?>> { // prefix for all proxy class names private static final String proxyClassNamePrefix = "$Proxy"; // next number to use for generation of unique proxy class names private static final AtomicLong nextUniqueNumber = new AtomicLong(); @Override public Class<?> apply(ClassLoader loader, Class<?>[] interfaces) { Map<Class<?>, Boolean> interfaceSet = new IdentityHashMap<>(interfaces.length); for (Class<?> intf : interfaces) { /* * Verify that the class loader resolves the name of this * interface to the same Class object. 
*/ Class<?> interfaceClass = null; try { interfaceClass = Class.forName(intf.getName(), false, loader); } catch (ClassNotFoundException e) { } if (interfaceClass != intf) { throw new IllegalArgumentException( intf + " is not visible from class loader"); } /* * Verify that the Class object actually represents an * interface. */ if (!interfaceClass.isInterface()) { throw new IllegalArgumentException( interfaceClass.getName() + " is not an interface"); } /* * Verify that this interface is not a duplicate. */ if (interfaceSet.put(interfaceClass, Boolean.TRUE) != null) { throw new IllegalArgumentException( "repeated interface: " + interfaceClass.getName()); } } String proxyPkg = null; // package to define proxy class in int accessFlags = Modifier.PUBLIC | Modifier.FINAL; /* * Record the package of a non-public proxy interface so that the * proxy class will be defined in the same package. Verify that * all non-public proxy interfaces are in the same package. */ for (Class<?> intf : interfaces) { int flags = intf.getModifiers(); if (!Modifier.isPublic(flags)) { accessFlags = Modifier.FINAL; String name = intf.getName(); int n = name.lastIndexOf('.'); String pkg = ((n == -1) ? "" : name.substring(0, n + 1)); if (proxyPkg == null) { proxyPkg = pkg; } else if (!pkg.equals(proxyPkg)) { throw new IllegalArgumentException( "non-public interfaces from different packages"); } } } if (proxyPkg == null) { // if no non-public proxy interfaces, use com.sun.proxy package proxyPkg = ReflectUtil.PROXY_PACKAGE + "."; } /* * Choose a name for the proxy class to generate. */ long num = nextUniqueNumber.getAndIncrement(); String proxyName = proxyPkg + proxyClassNamePrefix + num; /* * Generate the specified proxy class. 
*/ byte[] proxyClassFile = ProxyGenerator.generateProxyClass( proxyName, interfaces, accessFlags); try { return defineClass0(loader, proxyName, proxyClassFile, 0, proxyClassFile.length); } catch (ClassFormatError e) { /* * A ClassFormatError here means that (barring bugs in the * proxy class generation code) there was some other * invalid aspect of the arguments supplied to the proxy * class creation (such as virtual machine limitations * exceeded). */ throw new IllegalArgumentException(e.toString()); } } } /** * Returns an instance of a proxy class for the specified interfaces * that dispatches method invocations to the specified invocation * handler. * * <p>{@code Proxy.newProxyInstance} throws * {@code IllegalArgumentException} for the same reasons that * {@code Proxy.getProxyClass} does. * * @param loader the class loader to define the proxy class * @param interfaces the list of interfaces for the proxy class * to implement * @param h the invocation handler to dispatch method invocations to * @return a proxy instance with the specified invocation handler of a * proxy class that is defined by the specified class loader * and that implements the specified interfaces * @throws IllegalArgumentException if any of the restrictions on the * parameters that may be passed to {@code getProxyClass} * are violated * @throws SecurityException if a security manager, <em>s</em>, is present * and any of the following conditions is met: * <ul> * <li> the given {@code loader} is {@code null} and * the caller's class loader is not {@code null} and the * invocation of {@link SecurityManager#checkPermission * s.checkPermission} with * {@code RuntimePermission("getClassLoader")} permission * denies access;</li> * <li> for each proxy interface, {@code intf}, * the caller's class loader is not the same as or an * ancestor of the class loader for {@code intf} and * invocation of {@link SecurityManager#checkPackageAccess * s.checkPackageAccess()} denies access to {@code intf};</li> * 
<li> any of the given proxy interfaces is non-public and the * caller class is not in the same {@linkplain Package runtime package} * as the non-public interface and the invocation of * {@link SecurityManager#checkPermission s.checkPermission} with * {@code ReflectPermission("newProxyInPackage.{package name}")} * permission denies access.</li> * </ul> * @throws NullPointerException if the {@code interfaces} array * argument or any of its elements are {@code null}, or * if the invocation handler, {@code h}, is * {@code null} */ @CallerSensitive public static Object newProxyInstance(ClassLoader loader, Class<?>[] interfaces, InvocationHandler h) throws IllegalArgumentException { Objects.requireNonNull(h); final SecurityManager sm = System.getSecurityManager(); if (sm != null) { checkProxyAccess(Reflection.getCallerClass(), loader, interfaces); } /* * Look up or generate the designated proxy class. */ Class<?> cl = getProxyClass0(loader, interfaces); /* * Invoke its constructor with the designated invocation handler. 
*/ try { if (sm != null) { checkNewProxyPermission(Reflection.getCallerClass(), cl); } final Constructor<?> cons = cl.getConstructor(constructorParams); final InvocationHandler ih = h; if (!Modifier.isPublic(cl.getModifiers())) { AccessController.doPrivileged(new PrivilegedAction<Void>() { public Void run() { cons.setAccessible(true); return null; } }); } return cons.newInstance(new Object[]{h}); } catch (IllegalAccessException|InstantiationException e) { throw new InternalError(e.toString(), e); } catch (InvocationTargetException e) { Throwable t = e.getCause(); if (t instanceof RuntimeException) { throw (RuntimeException) t; } else { throw new InternalError(t.toString(), t); } } catch (NoSuchMethodException e) { throw new InternalError(e.toString(), e); } } private static void checkNewProxyPermission(Class<?> caller, Class<?> proxyClass) { SecurityManager sm = System.getSecurityManager(); if (sm != null) { if (ReflectUtil.isNonPublicProxyClass(proxyClass)) { ClassLoader ccl = caller.getClassLoader(); ClassLoader pcl = proxyClass.getClassLoader(); // do permission check if the caller is in a different runtime package // of the proxy class int n = proxyClass.getName().lastIndexOf('.'); String pkg = (n == -1) ? "" : proxyClass.getName().substring(0, n); n = caller.getName().lastIndexOf('.'); String callerPkg = (n == -1) ? "" : caller.getName().substring(0, n); if (pcl != ccl || !pkg.equals(callerPkg)) { sm.checkPermission(new ReflectPermission("newProxyInPackage." + pkg)); } } } } /** * Returns true if and only if the specified class was dynamically * generated to be a proxy class using the {@code getProxyClass} * method or the {@code newProxyInstance} method. * * <p>The reliability of this method is important for the ability * to use it to make security decisions, so its implementation should * not just test if the class in question extends {@code Proxy}. 
* * @param cl the class to test * @return {@code true} if the class is a proxy class and * {@code false} otherwise * @throws NullPointerException if {@code cl} is {@code null} */ public static boolean isProxyClass(Class<?> cl) { return Proxy.class.isAssignableFrom(cl) && proxyClassCache.containsValue(cl); } /** * Returns the invocation handler for the specified proxy instance. * * @param proxy the proxy instance to return the invocation handler for * @return the invocation handler for the proxy instance * @throws IllegalArgumentException if the argument is not a * proxy instance * @throws SecurityException if a security manager, <em>s</em>, is present * and the caller's class loader is not the same as or an * ancestor of the class loader for the invocation handler * and invocation of {@link SecurityManager#checkPackageAccess * s.checkPackageAccess()} denies access to the invocation * handler's class. */ @CallerSensitive public static InvocationHandler getInvocationHandler(Object proxy) throws IllegalArgumentException { /* * Verify that the object is actually a proxy instance. */ if (!isProxyClass(proxy.getClass())) { throw new IllegalArgumentException("not a proxy instance"); } final Proxy p = (Proxy) proxy; final InvocationHandler ih = p.h; if (System.getSecurityManager() != null) { Class<?> ihClass = ih.getClass(); Class<?> caller = Reflection.getCallerClass(); if (ReflectUtil.needsPackageAccessCheck(caller.getClassLoader(), ihClass.getClassLoader())) { ReflectUtil.checkPackageAccess(ihClass); } } return ih; } private static native Class<?> defineClass0(ClassLoader loader, String name, byte[] b, int off, int len); }
apache/jackrabbit-oak
35,137
oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.query.ast; import static java.util.Objects.requireNonNull; import static org.apache.jackrabbit.JcrConstants.NT_BASE; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.PropertyValue; import org.apache.jackrabbit.oak.api.Result.SizePrecision; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.commons.LazyValue; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.core.ImmutableRoot; import org.apache.jackrabbit.oak.plugins.memory.PropertyBuilder; import org.apache.jackrabbit.oak.plugins.metric.util.StatsProviderUtil; import org.apache.jackrabbit.oak.plugins.tree.TreeUtil; import org.apache.jackrabbit.oak.query.ExecutionContext; import org.apache.jackrabbit.oak.query.QueryEngineSettings; import org.apache.jackrabbit.oak.query.QueryImpl; import org.apache.jackrabbit.oak.query.QueryOptions; import org.apache.jackrabbit.oak.query.RuntimeNodeTraversalException; import 
org.apache.jackrabbit.oak.spi.query.fulltext.FullTextExpression; import org.apache.jackrabbit.oak.query.index.FilterImpl; import org.apache.jackrabbit.oak.query.plan.ExecutionPlan; import org.apache.jackrabbit.oak.query.plan.SelectorExecutionPlan; import org.apache.jackrabbit.oak.spi.query.Cursor; import org.apache.jackrabbit.oak.plugins.index.IndexConstants; import org.apache.jackrabbit.oak.plugins.index.cursor.Cursors; import org.apache.jackrabbit.oak.spi.query.IndexRow; import org.apache.jackrabbit.oak.plugins.memory.PropertyValues; import org.apache.jackrabbit.oak.spi.query.QueryConstants; import org.apache.jackrabbit.oak.spi.query.QueryIndex; import org.apache.jackrabbit.oak.spi.query.QueryIndex.AdvancedQueryIndex; import org.apache.jackrabbit.oak.spi.query.QueryIndex.IndexPlan; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.PrefetchNodeStore; import org.apache.jackrabbit.oak.stats.StatsOptions; import org.apache.jackrabbit.oak.stats.TimerStats; import org.apache.jackrabbit.oak.stats.CounterStats; import org.apache.jackrabbit.oak.stats.HistogramStats; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A selector within a query. */ public class SelectorImpl extends SourceImpl { private static final Logger LOG = LoggerFactory.getLogger(SelectorImpl.class); private static final Boolean TIMER_DISABLED = Boolean.getBoolean("oak.query.timerDisabled"); // The sample rate. Must be a power of 2. 
private static final Long TIMER_SAMPLE_RATE = Long.getLong("oak.query.timerSampleRate", 0x100); private static final long SLOW_QUERY_HISTOGRAM = 1; private static final long TOTAL_QUERY_HISTOGRAM = 0; private static final String SLOW_QUERY_PERCENTILE_METRICS_NAME = "SLOW_QUERY_PERCENTILE_METRICS"; private static final String SLOW_QUERY_COUNT_NAME = "SLOW_QUERY_COUNT"; private static long timerSampleCounter; // TODO possibly support using multiple indexes (using index intersection / index merge) private SelectorExecutionPlan plan; /** * The WHERE clause of the query. */ private ConstraintImpl queryConstraint; /** * The join condition of this selector that can be evaluated at execution * time. For the query "select * from nt:base as a inner join nt:base as b * on a.x = b.x", the join condition "a.x = b.x" is only set for the * selector b, as selector a can't evaluate it if it is executed first * (until b is executed). */ private JoinConditionImpl joinCondition; /** * The node type associated with the {@link #nodeTypeName} */ private final NodeTypeInfo nodeTypeInfo; private final String selectorName; private final String nodeTypeName; private final boolean matchesAllTypes; /** * All of the matching supertypes, or empty if the {@link #matchesAllTypes} * flag is set */ private final Set<String> supertypes; /** * All of the matching primary subtypes, or empty if the * {@link #matchesAllTypes} flag is set */ private final Set<String> primaryTypes; /** * All of the matching mixin types, or empty if the {@link #matchesAllTypes} * flag is set */ private final Set<String> mixinTypes; /** * Whether this selector is the parent of a descendent or parent-child join. * Access rights don't need to be checked in such selectors (unless there * are conditions on the selector). */ private boolean isParent; /** * Whether this selector is the left hand side of a left outer join. * Right outer joins are converted to left outer join. 
*/ private boolean outerJoinLeftHandSide; /** * Whether this selector is the right hand side of a left outer join. * Right outer joins are converted to left outer join. */ private boolean outerJoinRightHandSide; /** * The list of all join conditions this selector is involved. For the query * "select * from nt:base as a inner join nt:base as b on a.x = * b.x", the join condition "a.x = b.x" is set for both selectors a and b, * so both can check if the property x is set. * The join conditions are added during the init phase. */ private ArrayList<JoinConditionImpl> allJoinConditions = new ArrayList<JoinConditionImpl>(); /** * The selector constraints can be evaluated when the given selector is * evaluated. For example, for the query * "select * from nt:base a inner join nt:base b where a.x = 1 and b.y = 2", * the condition "a.x = 1" can be evaluated when evaluating selector a. The * other part of the condition can't be evaluated until b is available. * These constraints are collected during the prepare phase. 
*/ private final List<ConstraintImpl> selectorConstraints = new ArrayList<>(); private Cursor cursor; private IndexRow currentRow; private int scanCount; private String planIndexName; private TimerStats timerDuration; private CachedTree cachedTree; private boolean updateTotalQueryHistogram = true; public SelectorImpl(NodeTypeInfo nodeTypeInfo, String selectorName) { this.nodeTypeInfo = requireNonNull(nodeTypeInfo); this.selectorName = requireNonNull(selectorName); this.nodeTypeName = nodeTypeInfo.getNodeTypeName(); this.matchesAllTypes = NT_BASE.equals(nodeTypeName); if (!this.matchesAllTypes) { this.supertypes = nodeTypeInfo.getSuperTypes(); supertypes.add(nodeTypeName); this.primaryTypes = nodeTypeInfo.getPrimarySubTypes(); this.mixinTypes = nodeTypeInfo.getMixinSubTypes(); if (nodeTypeInfo.isMixin()) { mixinTypes.add(nodeTypeName); } else { primaryTypes.add(nodeTypeName); } } else { this.supertypes = Set.of(); this.primaryTypes = Set.of(); this.mixinTypes = Set.of(); } } public String getSelectorName() { return selectorName; } public String getNodeType() { return nodeTypeName; } public boolean matchesAllTypes() { return matchesAllTypes; } /** * @return all of the matching supertypes, or empty if the * {@link #matchesAllTypes} flag is set */ @NotNull public Set<String> getSupertypes() { return supertypes; } /** * @return all of the matching primary subtypes, or empty if the * {@link #matchesAllTypes} flag is set */ @NotNull public Set<String> getPrimaryTypes() { return primaryTypes; } /** * @return all of the matching mixin types, or empty if the * {@link #matchesAllTypes} flag is set */ @NotNull public Set<String> getMixinTypes() { return mixinTypes; } public Iterable<String> getWildcardColumns() { return nodeTypeInfo.getNamesSingleValuesProperties(); } @Override boolean accept(AstVisitor v) { return v.visit(this); } @Override public String toString() { return quote(nodeTypeName) + " as " + quote(selectorName); } public boolean isPrepared() { return plan != 
null; } @Override public void unprepare() { plan = null; planIndexName = null; timerDuration = null; selectorConstraints.clear(); isParent = false; joinCondition = null; allJoinConditions.clear(); } @Override public void prepare(ExecutionPlan p) { if (!(p instanceof SelectorExecutionPlan)) { throw new IllegalArgumentException("Not a selector plan"); } SelectorExecutionPlan selectorPlan = (SelectorExecutionPlan) p; if (selectorPlan.getSelector() != this) { throw new IllegalArgumentException("Not a plan for this selector"); } pushDown(); this.plan = selectorPlan; } private void pushDown() { if (queryConstraint != null) { queryConstraint.restrictPushDown(this); } if (!outerJoinLeftHandSide && !outerJoinRightHandSide) { for (JoinConditionImpl c : allJoinConditions) { c.restrictPushDown(this); } } } @Override public ExecutionPlan prepare() { if (plan != null) { return plan; } pushDown(); plan = query.getBestSelectorExecutionPlan(createFilter(true)); return plan; } public SelectorExecutionPlan getExecutionPlan() { return plan; } @Override public void setQueryConstraint(ConstraintImpl queryConstraint) { this.queryConstraint = queryConstraint; } @Override public void setOuterJoin(boolean outerJoinLeftHandSide, boolean outerJoinRightHandSide) { this.outerJoinLeftHandSide = outerJoinLeftHandSide; this.outerJoinRightHandSide = outerJoinRightHandSide; } @Override public void addJoinCondition(JoinConditionImpl joinCondition, boolean forThisSelector) { if (forThisSelector) { this.joinCondition = joinCondition; } allJoinConditions.add(joinCondition); if (joinCondition.isParent(this)) { isParent = true; } } @Override public void execute(NodeState rootState) { long start = startTimer(); try { executeInternal(rootState); } finally { stopTimer(start, true); } } private void executeInternal(NodeState rootState) { QueryIndex index = plan.getIndex(); timerDuration = null; if (index == null) { cursor = Cursors.newPathCursor(new ArrayList<String>(), query.getSettings()); planIndexName = 
"traverse"; return; } IndexPlan p = plan.getIndexPlan(); if (p != null) { planIndexName = p.getPlanName(); p.setFilter(createFilter(false)); AdvancedQueryIndex adv = (AdvancedQueryIndex) index; cursor = adv.query(p, rootState); } else { FilterImpl f = createFilter(false); planIndexName = index.getIndexName(f, rootState); cursor = index.query(f, rootState); } int prefetchCount = query.getQueryOptions().prefetchCount. orElse(query.getExecutionContext().getSettings().getPrefetchCount()); if (prefetchCount > 0) { PrefetchNodeStore store = query.getExecutionContext().getPrefetchNodeStore(); cursor = Cursors.newPrefetchCursor(cursor, store, prefetchCount, rootState, query.getQueryOptions().prefetch); } } private long startTimer() { if (TIMER_DISABLED) { return -1; } return System.nanoTime(); } private void stopTimer(long start, boolean execute) { if (start == -1) { return; } long timeNanos = System.nanoTime() - start; if (timeNanos > 1000000) { // always measure slow events (slower than 1 ms) measure(timeNanos); } else if ((timerSampleCounter++ & (TIMER_SAMPLE_RATE - 1)) == 0) { // only measure each xth fast event, but multiply by x, so on // average measured times are correct measure(timeNanos * TIMER_SAMPLE_RATE); } } private void measure(long timeNanos) { TimerStats t = timerDuration; if (t == null) { // reuse the timer (in the normal case) // QUERY_DURATION;index=<planIndexName> will be translated as metric name = QUERY_DURATION // and index=<planIndexName> as a label by a downstream consumer like prometheus. 
StatsProviderUtil statsProviderUtil = new StatsProviderUtil(query.getSettings().getStatisticsProvider()); t = timerDuration = statsProviderUtil.getTimerStats().apply("QUERY_DURATION", Collections.singletonMap("index", planIndexName)); } t.update(timeNanos, TimeUnit.NANOSECONDS); } @Override public String getPlan(NodeState rootState) { StringBuilder buff = new StringBuilder(); buff.append(toString()); buff.append(" /* "); QueryIndex index = getIndex(); if (index != null) { if (index instanceof AdvancedQueryIndex) { AdvancedQueryIndex adv = (AdvancedQueryIndex) index; IndexPlan p = plan.getIndexPlan(); buff.append(adv.getPlanDescription(p, rootState)); } else { buff.append(index.getPlan(createFilter(true), rootState)); } } else { buff.append("no-index\n"); } if (!selectorConstraints.isEmpty()) { // we could add the selector constraints here, // but it turns out this distracts more than it helps - // however for the JSON representation it would be useful, // that's why I think it makes sense to keep the commented code for now // buff.append(" selectorCondition: ").append(new AndImpl(selectorConstraints).toString()).append("\n"); } buff.append(" */"); return buff.toString(); } @Override public String getIndexCostInfo(NodeState rootState) { StringBuilder buff = new StringBuilder(); buff.append(quoteJson(selectorName)).append(": "); QueryIndex index = getIndex(); if (index != null) { if (index instanceof AdvancedQueryIndex) { IndexPlan p = plan.getIndexPlan(); buff.append("{ perEntry: ").append(p.getCostPerEntry()); buff.append(", perExecution: ").append(p.getCostPerExecution()); buff.append(", count: ").append(p.getEstimatedEntryCount()); buff.append(" }"); } else { buff.append(index.getCost(createFilter(true), rootState)); } } return buff.toString(); } /** * Create the filter condition for planning or execution. 
* * @param preparing whether a filter for the prepare phase should be made * @return the filter */ @Override public FilterImpl createFilter(boolean preparing) { FilterImpl f = new FilterImpl(this, query.getStatement(), query.getSettings()); f.setPreparing(preparing); if (joinCondition != null) { joinCondition.restrict(f); } // rep:excerpt handling: create a (fake) restriction // "rep:excerpt is not null" to let the index know that // we will need the excerpt for (ColumnImpl c : query.getColumns()) { if (c.getSelector().equals(this)) { String columnName = c.getColumnName(); if (columnName.equals(QueryConstants.OAK_SCORE_EXPLANATION)) { f.restrictProperty(columnName, Operator.NOT_EQUAL, null); } else if (columnName.startsWith(QueryConstants.REP_EXCERPT)) { f.restrictProperty(QueryConstants.REP_EXCERPT, Operator.EQUAL, PropertyValues.newString(columnName)); } else if (columnName.startsWith(QueryConstants.REP_FACET)) { f.restrictProperty(QueryConstants.REP_FACET, Operator.EQUAL, PropertyValues.newString(columnName)); } } } // all conditions can be pushed to the selectors - // except in some cases to "outer joined" selectors, // but the exceptions are handled in the condition // itself. // An example where it *is* a problem: // "select * from a left outer join b on a.x = b.y // where b.y is null" - in this case the selector b // must not use an index condition on "y is null" // (".. is null" must be written as "not .. is not null"). 
if (queryConstraint != null) { queryConstraint.restrict(f); FullTextExpression ft = queryConstraint.getFullTextConstraint(this); f.setFullTextConstraint(ft); } for (ConstraintImpl constraint : selectorConstraints) { constraint.restrict(f); } QueryOptions options = query.getQueryOptions(); if (options != null) { if (options.indexName != null) { f.restrictProperty(IndexConstants.INDEX_NAME_OPTION, Operator.EQUAL, PropertyValues.newString(options.indexName)); } if (options.indexTag != null) { f.restrictProperty(IndexConstants.INDEX_TAG_OPTION, Operator.EQUAL, PropertyValues.newString(options.indexTag)); } } return f; } @Override public boolean next() { long start = startTimer(); try { return nextInternal(); } finally { stopTimer(start, true); } } private boolean nextInternal() { while (cursor != null && cursor.hasNext()) { scanCount++; query.getQueryExecutionStats().scan(1, scanCount); try { totalQueryStats(query.getSettings()); currentRow = cursor.next(); } catch (RuntimeNodeTraversalException e) { addSlowQueryStats(query.getSettings()); LOG.warn(e.getMessage() + " for query " + query.getStatement()); throw e; } if (isParent) { // we must not check whether the _parent_ is readable // for joins of type // "select [b].[jcr:primaryType] // from [nt:base] as [a] // inner join [nt:base] as [b] // on isdescendantnode([b], [a]) // where [b].[jcr:path] = $path" // because if we did, we would filter out // correct results } else if (currentRow.isVirtualRow()) { // this is a virtual row and should be selected as is return true; } else { // we must check whether the _child_ is readable // (even if no properties are read) for joins of type // "select [a].[jcr:primaryType] // from [nt:base] as [a] // inner join [nt:base] as [b] // on isdescendantnode([b], [a]) // where [a].[jcr:path] = $path" // because not checking would reveal existence // of the child node if (!getCachedTree(currentRow.getPath()).exists()) { continue; } } if (evaluateCurrentRow()) { return true; } } cursor = 
null; currentRow = null; return false; } private void totalQueryStats(QueryEngineSettings queryEngineSettings) { if (updateTotalQueryHistogram) { updateTotalQueryHistogram = false; HistogramStats histogramStats = queryEngineSettings.getStatisticsProvider().getHistogram(SLOW_QUERY_PERCENTILE_METRICS_NAME, StatsOptions.METRICS_ONLY); histogramStats.update(TOTAL_QUERY_HISTOGRAM); } } private void addSlowQueryStats(QueryEngineSettings queryEngineSettings) { HistogramStats histogramStats = queryEngineSettings.getStatisticsProvider().getHistogram(SLOW_QUERY_PERCENTILE_METRICS_NAME, StatsOptions.METRICS_ONLY); histogramStats.update(SLOW_QUERY_HISTOGRAM); CounterStats slowQueryCounter = queryEngineSettings.getStatisticsProvider().getCounterStats(SLOW_QUERY_COUNT_NAME, StatsOptions.METRICS_ONLY); slowQueryCounter.inc(); } private boolean evaluateCurrentRow() { if (currentRow.isVirtualRow()) { //null path implies that all checks are already done -- we just need to pass it through return true; } if (!matchesAllTypes && !evaluateTypeMatch()) { return false; } for (ConstraintImpl constraint : selectorConstraints) { if (!constraint.evaluate()) { if (constraint.evaluateStop()) { // stop processing from now on cursor = null; } return false; } } if (joinCondition != null && !joinCondition.evaluate()) { return false; } return true; } private boolean evaluateTypeMatch() { CachedTree ct = getCachedTree(currentRow.getPath()); if (!ct.exists()) { return false; } Tree t = ct.getTree(); LazyValue<Tree> readOnly = ct.getReadOnlyTree(); String primaryTypeName = TreeUtil.getPrimaryTypeName(t, readOnly); if (primaryTypeName != null && primaryTypes.contains(primaryTypeName)) { return true; } for (String mixinName : TreeUtil.getMixinTypeNames(t, readOnly)) { if (mixinTypes.contains(mixinName)) { return true; } } // no matches found return false; } /** * Get the current absolute Oak path (normalized). * * @return the path */ public String currentPath() { return cursor == null ? 
null : currentRow.getPath(); } /** * Get the tree at the current path. * * @return the current tree, or null */ @Nullable public Tree currentTree() { String path = currentPath(); if (path == null) { return null; } return getTree(path); } @Nullable Tree getTree(@NotNull String path) { return getCachedTree(path).getTree(); } /** * Get the tree at the given path. * * @param path the path * @return the tree, or null */ @NotNull private CachedTree getCachedTree(@NotNull String path) { if (cachedTree == null || !cachedTree.denotes(path)) { cachedTree = new CachedTree(path, query); } return cachedTree; } /** * The value for the given selector for the current node. * * @param propertyName the JCR (not normalized) property name * @return the property value */ public PropertyValue currentProperty(String propertyName) { String pn = normalizePropertyName(propertyName); return currentOakProperty(pn); } /** * The value for the given selector for the current node, filtered by * property type. * * @param propertyName the JCR (not normalized) property name * @param propertyType only include properties of this type * @return the property value (possibly null) */ public PropertyValue currentProperty(String propertyName, int propertyType) { String pn = normalizePropertyName(propertyName); return currentOakProperty(pn, propertyType); } /** * Get the property value. The property name may be relative. The special * property names "jcr:path", "jcr:score" and "rep:excerpt" are supported. * * @param oakPropertyName (must already be normalized) * @return the property value or null if not found */ public PropertyValue currentOakProperty(String oakPropertyName) { return currentOakProperty(oakPropertyName, null); } private PropertyValue currentOakProperty(String oakPropertyName, Integer propertyType) { boolean asterisk = oakPropertyName.indexOf('*') >= 0; if (asterisk) { Tree t = currentTree(); if (t != null) { LOG.trace("currentOakProperty() - '*' case. 
looking for '{}' in '{}'", oakPropertyName, t.getPath()); } ArrayList<PropertyValue> list = new ArrayList<PropertyValue>(); readOakProperties(list, t, oakPropertyName, propertyType); if (list.size() == 0) { return null; } else if (list.size() == 1) { return list.get(0); } Type<?> type = list.get(0).getType(); for (int i = 1; i < list.size(); i++) { Type<?> t2 = list.get(i).getType(); if (t2 != type) { // types don't match type = Type.STRING; break; } } if (type == Type.STRING) { ArrayList<String> strings = new ArrayList<String>(); for (PropertyValue p : list) { p.getValue(Type.STRINGS).forEach(strings::add); } return PropertyValues.newString(strings); } Type<?> baseType = type.isArray() ? type.getBaseType() : type; @SuppressWarnings("unchecked") PropertyBuilder<Object> builder = (PropertyBuilder<Object>) PropertyBuilder.array(baseType); builder.setName(""); for (PropertyValue v : list) { if (type.isArray()) { for (Object value : (Iterable<?>) v.getValue(type)) { builder.addValue(value); } } else { builder.addValue(v.getValue(type)); } } PropertyState s = builder.getPropertyState(); return PropertyValues.create(s); } boolean relative = !oakPropertyName.startsWith(QueryConstants.REP_FACET + "(") && !oakPropertyName.startsWith(QueryConstants.REP_EXCERPT + "(") && oakPropertyName.indexOf('/') >= 0; Tree t = currentTree(); if (relative) { for (String p : PathUtils.elements(PathUtils.getParentPath(oakPropertyName))) { if (t == null) { return null; } if (p.equals("..")) { t = t.isRoot() ? 
null : t.getParent(); } else if (p.equals(".")) { // same node } else { t = t.getChild(p); } } oakPropertyName = PathUtils.getName(oakPropertyName); } return currentOakProperty(t, oakPropertyName, propertyType); } private PropertyValue currentOakProperty(Tree t, String oakPropertyName, Integer propertyType) { PropertyValue result; if ((t == null || !t.exists()) && (currentRow == null || !currentRow.isVirtualRow())) { return null; } if (oakPropertyName.equals(QueryConstants.JCR_PATH)) { String path = currentPath(); String local = getLocalPath(path); if (local == null) { // not a local path return null; } result = PropertyValues.newString(local); } else if (oakPropertyName.equals(QueryConstants.JCR_SCORE)) { result = currentRow.getValue(QueryConstants.JCR_SCORE); } else if (oakPropertyName.equals(QueryConstants.REP_EXCERPT) || oakPropertyName.startsWith(QueryConstants.REP_EXCERPT + "(")) { result = currentRow.getValue(oakPropertyName); } else if (oakPropertyName.equals(QueryConstants.OAK_SCORE_EXPLANATION)) { result = currentRow.getValue(QueryConstants.OAK_SCORE_EXPLANATION); } else if (oakPropertyName.equals(QueryConstants.REP_SPELLCHECK)) { result = currentRow.getValue(QueryConstants.REP_SPELLCHECK); } else if (oakPropertyName.equals(QueryConstants.REP_SUGGEST)) { result = currentRow.getValue(QueryConstants.REP_SUGGEST); } else if (oakPropertyName.startsWith(QueryConstants.REP_FACET + "(")) { result = currentRow.getValue(oakPropertyName); } else { result = PropertyValues.create(t.getProperty(oakPropertyName)); } if (result == null) { return null; } if (propertyType != null && result.getType().tag() != propertyType) { return null; } return result; } private void readOakProperties(ArrayList<PropertyValue> target, Tree t, String oakPropertyName, Integer propertyType) { boolean skipCurrentNode = false; while (!skipCurrentNode) { if (t == null || !t.exists()) { return; } LOG.trace("readOakProperties() - reading '{}' for '{}'", t.getPath(), oakPropertyName); int slash = 
oakPropertyName.indexOf('/'); if (slash < 0) { break; } String parent = oakPropertyName.substring(0, slash); oakPropertyName = oakPropertyName.substring(slash + 1); if (parent.equals("..")) { t = t.isRoot() ? null : t.getParent(); } else if (parent.equals(".")) { // same node } else if (parent.equals("*")) { for (Tree child : t.getChildren()) { readOakProperties(target, child, oakPropertyName, propertyType); } skipCurrentNode = true; } else { t = t.getChild(parent); } } if (skipCurrentNode) { return; } if (!"*".equals(oakPropertyName)) { PropertyValue value = currentOakProperty(t, oakPropertyName, propertyType); if (value != null) { LOG.trace("readOakProperties() - adding: '{}' from '{}'", value, t.getPath()); target.add(value); } return; } for (PropertyState p : t.getProperties()) { if (propertyType == null || p.getType().tag() == propertyType) { PropertyValue v = PropertyValues.create(p); target.add(v); } } } public boolean isVirtualRow() { return currentRow != null && currentRow.isVirtualRow(); } @Override public SelectorImpl getSelector(String selectorName) { if (selectorName.equals(this.selectorName)) { return this; } return null; } public long getScanCount() { return scanCount; } public void restrictSelector(ConstraintImpl constraint) { selectorConstraints.add(constraint); } public List<ConstraintImpl> getSelectorConstraints() { return selectorConstraints; } @Override public boolean equals(Object other) { if (this == other) { return true; } else if (!(other instanceof SelectorImpl)) { return false; } return selectorName.equals(((SelectorImpl) other).selectorName); } @Override public int hashCode() { return selectorName.hashCode(); } QueryIndex getIndex() { return plan == null ? 
null : plan.getIndex(); } @Override public ArrayList<SourceImpl> getInnerJoinSelectors() { ArrayList<SourceImpl> list = new ArrayList<SourceImpl>(); list.add(this); return list; } @Override public boolean isOuterJoinRightHandSide() { return this.outerJoinRightHandSide; } public QueryImpl getQuery() { return query; } @Override public long getSize(NodeState rootState, SizePrecision precision, long max) { if (cursor == null) { execute(rootState); } return cursor.getSize(precision, max); } @Override public SourceImpl copyOf() { return new SelectorImpl(nodeTypeInfo, selectorName); } private static final class CachedTree { private final String path; private final Tree tree; private final ExecutionContext ctx; private final LazyValue<Tree> readOnlyTree; private CachedTree(@NotNull String path, @NotNull QueryImpl query) { this.path = path; this.tree = query.getTree(path); this.ctx = query.getExecutionContext(); this.readOnlyTree = new LazyValue<Tree>() { @Override protected Tree createValue() { return new ImmutableRoot(ctx.getBaseState()).getTree(path); } }; } private boolean denotes(@NotNull String path) { return this.path.equals(path); } private boolean exists() { return tree != null && tree.exists(); } @Nullable private Tree getTree() { return tree; } @NotNull private LazyValue<Tree> getReadOnlyTree() { return readOnlyTree; } } }
googleapis/google-cloud-java
35,202
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ChangelogsClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3beta1; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.cx.v3beta1.stub.ChangelogsStub; import com.google.cloud.dialogflow.cx.v3beta1.stub.ChangelogsStubSettings; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Service for managing * [Changelogs][google.cloud.dialogflow.cx.v3beta1.Changelog]. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. 
Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ChangelogName name = ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]"); * Changelog response = changelogsClient.getChangelog(name); * } * }</pre> * * <p>Note: close() needs to be called on the ChangelogsClient object to clean up resources such as * threads. In the example above, try-with-resources is used, which automatically calls close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> ListChangelogs</td> * <td><p> Returns the list of Changelogs.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listChangelogs(ListChangelogsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> listChangelogs(AgentName parent) * <li><p> listChangelogs(String parent) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listChangelogsPagedCallable() * <li><p> listChangelogsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GetChangelog</td> * <td><p> Retrieves the specified Changelog.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be 
constructed before the call.</p> * <ul> * <li><p> getChangelog(GetChangelogRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> getChangelog(ChangelogName name) * <li><p> getChangelog(String name) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getChangelogCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListLocations</td> * <td><p> Lists information about the supported locations for this service.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listLocations(ListLocationsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listLocationsPagedCallable() * <li><p> listLocationsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GetLocation</td> * <td><p> Gets information about a location.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> getLocation(GetLocationRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getLocationCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. 
* * <p>This class can be customized by passing in a custom instance of ChangelogsSettings to * create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ChangelogsSettings changelogsSettings = * ChangelogsSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ChangelogsSettings changelogsSettings = * ChangelogsSettings.newBuilder().setEndpoint(myEndpoint).build(); * ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings); * }</pre> * * <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over * the wire: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ChangelogsSettings changelogsSettings = ChangelogsSettings.newHttpJsonBuilder().build(); * ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi @Generated("by gapic-generator-java") public class ChangelogsClient implements BackgroundResource { private final ChangelogsSettings settings; private final ChangelogsStub stub; /** Constructs an instance of ChangelogsClient with default settings. */ public static final ChangelogsClient create() throws IOException { return create(ChangelogsSettings.newBuilder().build()); } /** * Constructs an instance of ChangelogsClient, using the given settings. The channels are created * based on the settings passed in, or defaults for any settings that are not set. */ public static final ChangelogsClient create(ChangelogsSettings settings) throws IOException { return new ChangelogsClient(settings); } /** * Constructs an instance of ChangelogsClient, using the given stub for making calls. This is for * advanced usage - prefer using create(ChangelogsSettings). */ public static final ChangelogsClient create(ChangelogsStub stub) { return new ChangelogsClient(stub); } /** * Constructs an instance of ChangelogsClient, using the given settings. This is protected so that * it is easy to make a subclass, but otherwise, the static factory methods should be preferred. 
*/ protected ChangelogsClient(ChangelogsSettings settings) throws IOException { this.settings = settings; this.stub = ((ChangelogsStubSettings) settings.getStubSettings()).createStub(); } protected ChangelogsClient(ChangelogsStub stub) { this.settings = null; this.stub = stub; } public final ChangelogsSettings getSettings() { return settings; } public ChangelogsStub getStub() { return stub; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of Changelogs. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]"); * for (Changelog element : changelogsClient.listChangelogs(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The agent containing the changelogs. Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListChangelogsPagedResponse listChangelogs(AgentName parent) { ListChangelogsRequest request = ListChangelogsRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .build(); return listChangelogs(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of Changelogs. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * String parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString(); * for (Changelog element : changelogsClient.listChangelogs(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The agent containing the changelogs. Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListChangelogsPagedResponse listChangelogs(String parent) { ListChangelogsRequest request = ListChangelogsRequest.newBuilder().setParent(parent).build(); return listChangelogs(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of Changelogs. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListChangelogsRequest request = * ListChangelogsRequest.newBuilder() * .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString()) * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * for (Changelog element : changelogsClient.listChangelogs(request).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListChangelogsPagedResponse listChangelogs(ListChangelogsRequest request) { return listChangelogsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of Changelogs. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListChangelogsRequest request = * ListChangelogsRequest.newBuilder() * .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString()) * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * ApiFuture<Changelog> future = * changelogsClient.listChangelogsPagedCallable().futureCall(request); * // Do something. * for (Changelog element : future.get().iterateAll()) { * // doThingsWith(element); * } * } * }</pre> */ public final UnaryCallable<ListChangelogsRequest, ListChangelogsPagedResponse> listChangelogsPagedCallable() { return stub.listChangelogsPagedCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of Changelogs. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListChangelogsRequest request = * ListChangelogsRequest.newBuilder() * .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString()) * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * while (true) { * ListChangelogsResponse response = changelogsClient.listChangelogsCallable().call(request); * for (Changelog element : response.getChangelogsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * }</pre> */ public final UnaryCallable<ListChangelogsRequest, ListChangelogsResponse> listChangelogsCallable() { return stub.listChangelogsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves the specified Changelog. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ChangelogName name = ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]"); * Changelog response = changelogsClient.getChangelog(name); * } * }</pre> * * @param name Required. The name of the changelog to get. 
Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/changelogs/&lt;ChangelogID&gt;`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Changelog getChangelog(ChangelogName name) { GetChangelogRequest request = GetChangelogRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return getChangelog(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves the specified Changelog. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * String name = * ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString(); * Changelog response = changelogsClient.getChangelog(name); * } * }</pre> * * @param name Required. The name of the changelog to get. Format: * `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;/changelogs/&lt;ChangelogID&gt;`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Changelog getChangelog(String name) { GetChangelogRequest request = GetChangelogRequest.newBuilder().setName(name).build(); return getChangelog(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves the specified Changelog. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * GetChangelogRequest request = * GetChangelogRequest.newBuilder() * .setName( * ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString()) * .build(); * Changelog response = changelogsClient.getChangelog(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Changelog getChangelog(GetChangelogRequest request) { return getChangelogCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves the specified Changelog. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * GetChangelogRequest request = * GetChangelogRequest.newBuilder() * .setName( * ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString()) * .build(); * ApiFuture<Changelog> future = changelogsClient.getChangelogCallable().futureCall(request); * // Do something. * Changelog response = future.get(); * } * }</pre> */ public final UnaryCallable<GetChangelogRequest, Changelog> getChangelogCallable() { return stub.getChangelogCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. 
* * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * for (Location element : changelogsClient.listLocations(request).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) { return listLocationsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * ApiFuture<Location> future = * changelogsClient.listLocationsPagedCallable().futureCall(request); * // Do something. * for (Location element : future.get().iterateAll()) { * // doThingsWith(element); * } * } * }</pre> */ public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return stub.listLocationsPagedCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * while (true) { * ListLocationsResponse response = changelogsClient.listLocationsCallable().call(request); * for (Location element : response.getLocationsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * }</pre> */ public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return stub.listLocationsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets information about a location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); * Location response = changelogsClient.getLocation(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Location getLocation(GetLocationRequest request) { return getLocationCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets information about a location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ChangelogsClient changelogsClient = ChangelogsClient.create()) { * GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); * ApiFuture<Location> future = changelogsClient.getLocationCallable().futureCall(request); * // Do something. * Location response = future.get(); * } * }</pre> */ public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return stub.getLocationCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } public static class ListChangelogsPagedResponse extends AbstractPagedListResponse< ListChangelogsRequest, ListChangelogsResponse, Changelog, ListChangelogsPage, ListChangelogsFixedSizeCollection> { public static ApiFuture<ListChangelogsPagedResponse> createAsync( PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context, ApiFuture<ListChangelogsResponse> 
futureResponse) { ApiFuture<ListChangelogsPage> futurePage = ListChangelogsPage.createEmptyPage().createPageAsync(context, futureResponse); return ApiFutures.transform( futurePage, input -> new ListChangelogsPagedResponse(input), MoreExecutors.directExecutor()); } private ListChangelogsPagedResponse(ListChangelogsPage page) { super(page, ListChangelogsFixedSizeCollection.createEmptyCollection()); } } public static class ListChangelogsPage extends AbstractPage< ListChangelogsRequest, ListChangelogsResponse, Changelog, ListChangelogsPage> { private ListChangelogsPage( PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context, ListChangelogsResponse response) { super(context, response); } private static ListChangelogsPage createEmptyPage() { return new ListChangelogsPage(null, null); } @Override protected ListChangelogsPage createPage( PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context, ListChangelogsResponse response) { return new ListChangelogsPage(context, response); } @Override public ApiFuture<ListChangelogsPage> createPageAsync( PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context, ApiFuture<ListChangelogsResponse> futureResponse) { return super.createPageAsync(context, futureResponse); } } public static class ListChangelogsFixedSizeCollection extends AbstractFixedSizeCollection< ListChangelogsRequest, ListChangelogsResponse, Changelog, ListChangelogsPage, ListChangelogsFixedSizeCollection> { private ListChangelogsFixedSizeCollection(List<ListChangelogsPage> pages, int collectionSize) { super(pages, collectionSize); } private static ListChangelogsFixedSizeCollection createEmptyCollection() { return new ListChangelogsFixedSizeCollection(null, 0); } @Override protected ListChangelogsFixedSizeCollection createCollection( List<ListChangelogsPage> pages, int collectionSize) { return new ListChangelogsFixedSizeCollection(pages, collectionSize); } } public static class ListLocationsPagedResponse 
extends AbstractPagedListResponse< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage, ListLocationsFixedSizeCollection> { public static ApiFuture<ListLocationsPagedResponse> createAsync( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ApiFuture<ListLocationsResponse> futureResponse) { ApiFuture<ListLocationsPage> futurePage = ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse); return ApiFutures.transform( futurePage, input -> new ListLocationsPagedResponse(input), MoreExecutors.directExecutor()); } private ListLocationsPagedResponse(ListLocationsPage page) { super(page, ListLocationsFixedSizeCollection.createEmptyCollection()); } } public static class ListLocationsPage extends AbstractPage< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> { private ListLocationsPage( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ListLocationsResponse response) { super(context, response); } private static ListLocationsPage createEmptyPage() { return new ListLocationsPage(null, null); } @Override protected ListLocationsPage createPage( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ListLocationsResponse response) { return new ListLocationsPage(context, response); } @Override public ApiFuture<ListLocationsPage> createPageAsync( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ApiFuture<ListLocationsResponse> futureResponse) { return super.createPageAsync(context, futureResponse); } } public static class ListLocationsFixedSizeCollection extends AbstractFixedSizeCollection< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage, ListLocationsFixedSizeCollection> { private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) { super(pages, collectionSize); } private static ListLocationsFixedSizeCollection createEmptyCollection() { return new 
ListLocationsFixedSizeCollection(null, 0); } @Override protected ListLocationsFixedSizeCollection createCollection( List<ListLocationsPage> pages, int collectionSize) { return new ListLocationsFixedSizeCollection(pages, collectionSize); } } }
googleapis/sdk-platform-java
35,012
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/ListSessionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: schema/google/showcase/v1beta1/testing.proto // Protobuf Java Version: 3.25.8 package com.google.showcase.v1beta1; /** * * * <pre> * Response for the ListSessions method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.ListSessionsResponse} */ public final class ListSessionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.ListSessionsResponse) ListSessionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSessionsResponse.newBuilder() to construct. 
  private ListSessionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: empty session list, empty page token.
  private ListSessionsResponse() {
    sessions_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSessionsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.showcase.v1beta1.TestingOuterClass
        .internal_static_google_showcase_v1beta1_ListSessionsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.showcase.v1beta1.TestingOuterClass
        .internal_static_google_showcase_v1beta1_ListSessionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.showcase.v1beta1.ListSessionsResponse.class,
            com.google.showcase.v1beta1.ListSessionsResponse.Builder.class);
  }

  public static final int SESSIONS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.showcase.v1beta1.Session> sessions_;

  /**
   * The sessions being returned.
   *
   * <code>repeated .google.showcase.v1beta1.Session sessions = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.showcase.v1beta1.Session> getSessionsList() {
    return sessions_;
  }

  /** OrBuilder view of {@code sessions}. */
  @java.lang.Override
  public java.util.List<? extends com.google.showcase.v1beta1.SessionOrBuilder>
      getSessionsOrBuilderList() {
    return sessions_;
  }

  /** Number of elements in {@code sessions}. */
  @java.lang.Override
  public int getSessionsCount() {
    return sessions_.size();
  }

  /** Element of {@code sessions} at {@code index}. */
  @java.lang.Override
  public com.google.showcase.v1beta1.Session getSessions(int index) {
    return sessions_.get(index);
  }

  /** OrBuilder element of {@code sessions} at {@code index}. */
  @java.lang.Override
  public com.google.showcase.v1beta1.SessionOrBuilder getSessionsOrBuilder(int index) {
    return sessions_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a String or a lazily-decoded ByteString; volatile so the
  // cached decode below publishes safely across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * The next page token, if any. An empty value here means the last page has been reached.
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString form once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   * The next page token, if any. An empty value here means the last page has been reached.
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoded form so repeated calls avoid re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < sessions_.size(); i++) {
      output.writeMessage(1, sessions_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < sessions_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sessions_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.showcase.v1beta1.ListSessionsResponse)) {
      return super.equals(obj);
    }
    com.google.showcase.v1beta1.ListSessionsResponse other =
        (com.google.showcase.v1beta1.ListSessionsResponse) obj;

    if (!getSessionsList().equals(other.getSessionsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSessionsCount() > 0) {
      hash = (37 * hash) + SESSIONS_FIELD_NUMBER;
      hash = (53 * hash) + getSessionsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static
  com.google.showcase.v1beta1.ListSessionsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.showcase.v1beta1.ListSessionsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.showcase.v1beta1.ListSessionsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance carries no state worth merging, so skip mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Response for the ListSessions method.
   *
   * <p>Protobuf type {@code google.showcase.v1beta1.ListSessionsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.ListSessionsResponse)
      com.google.showcase.v1beta1.ListSessionsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.showcase.v1beta1.TestingOuterClass
          .internal_static_google_showcase_v1beta1_ListSessionsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.showcase.v1beta1.TestingOuterClass
          .internal_static_google_showcase_v1beta1_ListSessionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.showcase.v1beta1.ListSessionsResponse.class,
              com.google.showcase.v1beta1.ListSessionsResponse.Builder.class);
    }

    // Construct using com.google.showcase.v1beta1.ListSessionsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (sessionsBuilder_ == null) {
        sessions_ = java.util.Collections.emptyList();
      } else {
        sessions_ = null;
        sessionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.showcase.v1beta1.TestingOuterClass
          .internal_static_google_showcase_v1beta1_ListSessionsResponse_descriptor;
    }

    @java.lang.Override
    public
    com.google.showcase.v1beta1.ListSessionsResponse getDefaultInstanceForType() {
      return com.google.showcase.v1beta1.ListSessionsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.showcase.v1beta1.ListSessionsResponse build() {
      com.google.showcase.v1beta1.ListSessionsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.showcase.v1beta1.ListSessionsResponse buildPartial() {
      com.google.showcase.v1beta1.ListSessionsResponse result =
          new com.google.showcase.v1beta1.ListSessionsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated sessions field; freezes the list if this builder owns it.
    private void buildPartialRepeatedFields(
        com.google.showcase.v1beta1.ListSessionsResponse result) {
      if (sessionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          sessions_ = java.util.Collections.unmodifiableList(sessions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.sessions_ = sessions_;
      } else {
        result.sessions_ = sessionsBuilder_.build();
      }
    }

    // Transfers singular fields guarded by their has-bits.
    private void buildPartial0(com.google.showcase.v1beta1.ListSessionsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.showcase.v1beta1.ListSessionsResponse) {
        return mergeFrom((com.google.showcase.v1beta1.ListSessionsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.showcase.v1beta1.ListSessionsResponse other) {
      if (other == com.google.showcase.v1beta1.ListSessionsResponse.getDefaultInstance())
        return this;
      if (sessionsBuilder_ == null) {
        if (!other.sessions_.isEmpty()) {
          if (sessions_.isEmpty()) {
            // Share the other message's immutable list until a mutation occurs.
            sessions_ = other.sessions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSessionsIsMutable();
            sessions_.addAll(other.sessions_);
          }
          onChanged();
        }
      } else {
        if (!other.sessions_.isEmpty()) {
          if (sessionsBuilder_.isEmpty()) {
            // Drop the empty builder and adopt the other message's list directly.
            sessionsBuilder_.dispose();
            sessionsBuilder_ = null;
            sessions_ = other.sessions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            sessionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSessionsFieldBuilder()
                    : null;
          } else {
            sessionsBuilder_.addAllMessages(other.sessions_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.showcase.v1beta1.Session m =
                    input.readMessage(
                        com.google.showcase.v1beta1.Session.parser(), extensionRegistry);
                if (sessionsBuilder_ == null) {
                  ensureSessionsIsMutable();
                  sessions_.add(m);
                } else {
                  sessionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.showcase.v1beta1.Session> sessions_ =
        java.util.Collections.emptyList();

    // Copies the (possibly shared/immutable) list before the first in-place mutation.
    private void ensureSessionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        sessions_ = new java.util.ArrayList<com.google.showcase.v1beta1.Session>(sessions_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.showcase.v1beta1.Session,
            com.google.showcase.v1beta1.Session.Builder,
            com.google.showcase.v1beta1.SessionOrBuilder>
        sessionsBuilder_;

    /**
     *
     *
     * <pre>
     * The
sessions being returned.
     * </pre>
     *
     * <code>repeated .google.showcase.v1beta1.Session sessions = 1;</code>
     */
    public java.util.List<com.google.showcase.v1beta1.Session> getSessionsList() {
      if (sessionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(sessions_);
      } else {
        return sessionsBuilder_.getMessageList();
      }
    }

    /** Number of elements in {@code sessions}. */
    public int getSessionsCount() {
      if (sessionsBuilder_ == null) {
        return sessions_.size();
      } else {
        return sessionsBuilder_.getCount();
      }
    }

    /** Element of {@code sessions} at {@code index}. */
    public com.google.showcase.v1beta1.Session getSessions(int index) {
      if (sessionsBuilder_ == null) {
        return sessions_.get(index);
      } else {
        return sessionsBuilder_.getMessage(index);
      }
    }

    /** Replaces the element of {@code sessions} at {@code index}. */
    public Builder setSessions(int index, com.google.showcase.v1beta1.Session value) {
      if (sessionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSessionsIsMutable();
        sessions_.set(index, value);
        onChanged();
      } else {
        sessionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /** Replaces the element of {@code sessions} at {@code index} with a built message. */
    public Builder setSessions(
        int index, com.google.showcase.v1beta1.Session.Builder builderForValue) {
      if (sessionsBuilder_ == null) {
        ensureSessionsIsMutable();
        sessions_.set(index, builderForValue.build());
        onChanged();
      } else {
        sessionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends {@code value} to {@code sessions}. */
    public Builder addSessions(com.google.showcase.v1beta1.Session value) {
      if (sessionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSessionsIsMutable();
        sessions_.add(value);
        onChanged();
      } else {
        sessionsBuilder_.addMessage(value);
      }
      return this;
    }

    /** Inserts {@code value} into {@code sessions} at {@code index}. */
    public Builder addSessions(int index, com.google.showcase.v1beta1.Session value) {
      if (sessionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSessionsIsMutable();
        sessions_.add(index, value);
        onChanged();
      } else {
        sessionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /** Appends a built message to {@code sessions}. */
    public Builder addSessions(com.google.showcase.v1beta1.Session.Builder builderForValue) {
      if (sessionsBuilder_ == null) {
        ensureSessionsIsMutable();
        sessions_.add(builderForValue.build());
        onChanged();
      } else {
        sessionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /** Inserts a built message into {@code sessions} at {@code index}. */
    public Builder addSessions(
        int index, com.google.showcase.v1beta1.Session.Builder builderForValue) {
      if (sessionsBuilder_ == null) {
        ensureSessionsIsMutable();
        sessions_.add(index, builderForValue.build());
        onChanged();
      } else {
        sessionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends all of {@code values} to {@code sessions}. */
    public Builder addAllSessions(
        java.lang.Iterable<? extends com.google.showcase.v1beta1.Session> values) {
      if (sessionsBuilder_ == null) {
        ensureSessionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sessions_);
        onChanged();
      } else {
        sessionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /** Clears {@code sessions}. */
    public Builder clearSessions() {
      if (sessionsBuilder_ == null) {
        sessions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        sessionsBuilder_.clear();
      }
      return this;
    }

    /** Removes the element of {@code sessions} at {@code index}. */
    public Builder removeSessions(int index) {
      if (sessionsBuilder_ == null) {
        ensureSessionsIsMutable();
        sessions_.remove(index);
        onChanged();
      } else {
        sessionsBuilder_.remove(index);
      }
      return this;
    }

    /** Builder view of the element at {@code index} (forces builder mode). */
    public com.google.showcase.v1beta1.Session.Builder getSessionsBuilder(int index) {
      return getSessionsFieldBuilder().getBuilder(index);
    }

    /** OrBuilder view of the element at {@code index}. */
    public com.google.showcase.v1beta1.SessionOrBuilder getSessionsOrBuilder(int index) {
      if (sessionsBuilder_ == null) {
        return sessions_.get(index);
      } else {
        return sessionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /** Read-only OrBuilder view of {@code sessions}. */
    public java.util.List<? extends com.google.showcase.v1beta1.SessionOrBuilder>
        getSessionsOrBuilderList() {
      if (sessionsBuilder_ != null) {
        return sessionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(sessions_);
      }
    }

    /** Appends and returns a new element builder for {@code sessions}. */
    public com.google.showcase.v1beta1.Session.Builder addSessionsBuilder() {
      return getSessionsFieldBuilder()
          .addBuilder(com.google.showcase.v1beta1.Session.getDefaultInstance());
    }

    /** Inserts and returns a new element builder at {@code index}. */
    public com.google.showcase.v1beta1.Session.Builder addSessionsBuilder(int index) {
      return getSessionsFieldBuilder()
          .addBuilder(index, com.google.showcase.v1beta1.Session.getDefaultInstance());
    }

    /** Builder views of every element of {@code sessions}. */
    public java.util.List<com.google.showcase.v1beta1.Session.Builder> getSessionsBuilderList() {
      return getSessionsFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder; once created, the builder
    // owns the elements and the local list reference is dropped.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.showcase.v1beta1.Session,
            com.google.showcase.v1beta1.Session.Builder,
            com.google.showcase.v1beta1.SessionOrBuilder>
        getSessionsFieldBuilder() {
      if (sessionsBuilder_ == null) {
        sessionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.showcase.v1beta1.Session,
                com.google.showcase.v1beta1.Session.Builder,
                com.google.showcase.v1beta1.SessionOrBuilder>(
                sessions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        sessions_ = null;
      }
      return sessionsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * The next page token, if any.
     * An empty value here means the last page has been reached.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next page token, if any. * An empty value here means the last page has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next page token, if any. * An empty value here means the last page has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The next page token, if any. * An empty value here means the last page has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The next page token, if any. * An empty value here means the last page has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.ListSessionsResponse) } // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.ListSessionsResponse) private static final com.google.showcase.v1beta1.ListSessionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.showcase.v1beta1.ListSessionsResponse(); } public static com.google.showcase.v1beta1.ListSessionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSessionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListSessionsResponse>() { @java.lang.Override public ListSessionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListSessionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSessionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.showcase.v1beta1.ListSessionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/nosql
35,310
kvmain/src/main/java/com/sleepycat/je/Transaction.java
/*- * Copyright (C) 2002, 2025, Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package com.sleepycat.je; import java.util.concurrent.TimeUnit; import com.sleepycat.je.Durability.ReplicaAckPolicy; import com.sleepycat.je.dbi.EnvironmentImpl; import com.sleepycat.je.txn.Locker; import com.sleepycat.je.txn.Txn; import com.sleepycat.je.utilint.PropUtil; /** * The Transaction object is the handle for a transaction. Methods off the * transaction handle are used to configure, abort and commit the transaction. * Transaction handles are provided to other Berkeley DB methods in order to * transactionally protect those operations. * * <p>A single Transaction may be used to protect operations for any number of * Databases in a given environment. However, a single Transaction may not be * used for operations in more than one distinct environment.</p> * * <p>Transaction handles are free-threaded; transactions handles may be used * concurrently by multiple threads. 
Once the {@link Transaction#abort * Transaction.abort} or {@link Transaction#commit Transaction.commit} method * is called, the handle may not be accessed again, regardless of the success * or failure of the method, with one exception: the {@code abort} method may * be called any number of times to simplify error handling.</p> * * <p>To obtain a transaction with default attributes:</p> * * <blockquote><pre> * Transaction txn = myEnvironment.beginTransaction(null, null); * </pre></blockquote> * * <p>To customize the attributes of a transaction:</p> * * <blockquote><pre> * TransactionConfig config = new TransactionConfig(); * config.setReadUncommitted(true); * Transaction txn = myEnvironment.beginTransaction(null, config); * </pre></blockquote> */ public class Transaction { /** * The current state of the transaction. * * @since 5.0.48 */ public enum State { /** * The transaction has not been committed or aborted, and can be used * for performing operations. This state is also indicated if {@link * #isValid} returns true. For all other states, {@link #isValid} will * return false. */ OPEN, /** * An exception was thrown by the {@code commit} method due to an error * that occurred while attempting to make the transaction durable. The * transaction may or may not be locally durable, according to the * {@link Durability#getLocalSync local SyncPolicy} requested. * <p> * This is an unusual situation and is normally due to a system * failure, storage device failure, disk full condition, thread * interrupt, or a bug of some kind. When a transaction is in this * state, the Environment will have been {@link Environment#isValid() * invalidated} by the error. * <p> * In a replicated environment, a transaction in this state is not * transferred to replicas. If it turns out that the transaction is * indeed durable, it will be transferred to replicas via normal * replication mechanisms when the Environment is re-opened. 
* <p> * When the {@code commit} method throws an exception and the * transaction is in the {@code POSSIBLY_COMMITTED} state, some * applications may wish to perform a data query to determine whether * the transaction is durable or not. Note that in the event of a * system level failure, the reads themselves may be unreliable, e.g. * the data may be in the file system cache but not on disk. Other * applications may wish to repeat the transaction unconditionally, * after resolving the error condition, particularly when the set of * operations in the transaction is designed to be idempotent. */ POSSIBLY_COMMITTED, /** * The transaction has been committed and is locally durable according * to the {@link Durability#getLocalSync local SyncPolicy} requested. * <p> * Note that a transaction may be in this state even when an exception * is thrown by the {@code commit} method. For example, in a * replicated environment, an {@link * com.sleepycat.je.rep.InsufficientAcksException} may be thrown after * the transaction is committed locally. */ COMMITTED, /** * The transaction has been invalidated by an exception and cannot be * committed. See {@link OperationFailureException} for a description * of how a transaction can become invalid. The application is * responsible for aborting the transaction. */ MUST_ABORT, /** * The transaction has been aborted. */ ABORTED, } private Txn txn; private final Environment env; private final long id; private String name; /* * It's set upon a successful updating replicated commit and identifies the * VLSN associated with the commit entry. */ private CommitToken commitToken = null; /* * Is null until setTxnNull is called, and then it holds the state at the * time the txn was closed. */ private State finalState = null; /* * Commit and abort methods are synchronized to prevent them from running * concurrently with operations using the transaction. See * Cursor.getTxnSynchronizer. */ /** * For internal use. * @hidden * Creates a transaction. 
*/ protected Transaction(Environment env, Txn txn) { this.env = env; this.txn = txn; txn.setTransaction(this); /* * Copy the id to this wrapper object so the id will be available * after the transaction is closed and the txn field is nulled. */ this.id = txn.getId(); } /** * Cause an abnormal termination of the transaction. * * <p>The log is played backward, and any necessary undo operations are * done. Before Transaction.abort returns, any locks held by the * transaction will have been released.</p> * * <p>In the case of nested transactions, aborting a parent transaction * causes all children (unresolved or not) of the parent transaction to be * aborted.</p> * * <p>All cursors opened within the transaction must be closed before the * transaction is aborted.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method itself may be called any number of * times to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the environment has been closed, or * cursors associated with the transaction are still open. */ public synchronized void abort() throws DatabaseException { try { /* * If the transaction is already closed, do nothing. Do not call * checkOpen in order to support any number of calls to abort(). 
*/ if (txn == null) { return; } /* * Check env only after checking for closed txn, to mimic close() * behavior for Cursors, etc, and avoid unnecessary exception * handling. [#21264] */ checkEnv(); env.removeReferringHandle(this); txn.abort(); /* Remove reference to internal txn, so we can reclaim memory. */ setTxnNull(); } catch (Error E) { DbInternal.getNonNullEnvImpl(env).invalidate(E); throw E; } } /** * Return the transaction's unique ID. * * @return The transaction's unique ID. */ public long getId() { return id; } /** * This method is intended for use with a replicated environment. * <p> * It returns the commitToken associated with a successful replicated * commit. A null value is returned if the txn was not associated with a * replicated environment, or the txn did not result in any changes to the * environment. This method should only be called after the transaction * has finished. * <p> * This method is typically used in conjunction with the <code> * CommitPointConsistencyPolicy</code>. * * @return the token used to identify the replicated commit. Return null if * the transaction has aborted, or has committed without making any * updates. * * @throws IllegalStateException if the method is called before the * transaction has committed or aborted. * * @see com.sleepycat.je.rep.CommitPointConsistencyPolicy */ public CommitToken getCommitToken() throws IllegalStateException { if (txn == null) { /* * The commit token is only legitimate after the transaction is * closed. A null txn field means the transaction is closed. */ return commitToken; } throw new IllegalStateException ("This transaction is still in progress and a commit token " + "is not available"); } /** * End the transaction. If the environment is configured for synchronous * commit, the transaction will be committed synchronously to stable * storage before the call returns. This means the transaction will * exhibit all of the ACID (atomicity, consistency, isolation, and * durability) properties. 
* * <p>If the environment is not configured for synchronous commit, the * commit will not necessarily have been committed to stable storage before * the call returns. This means the transaction will exhibit the ACI * (atomicity, consistency, and isolation) properties, but not D * (durability); that is, database integrity will be maintained, but it is * possible this transaction may be undone during recovery.</p> * * <p>All cursors opened within the transaction must be closed before the * transaction is committed.</p> * * <p>If the method encounters an error, the transaction <!-- and all child * transactions of the transaction --> will have been aborted when the call * returns.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method may be called any number of times * to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @throws com.sleepycat.je.rep.InsufficientReplicasException if the master * in a replicated environment could not contact a quorum of replicas as * determined by the {@link ReplicaAckPolicy}. * * @throws com.sleepycat.je.rep.InsufficientAcksException if the master in * a replicated environment did not receive enough replica acknowledgments, * although the commit succeeded locally. * * @throws com.sleepycat.je.rep.ReplicaWriteException if a write operation * was performed with this transaction, but this node is now a Replica. * * @throws OperationFailureException if this exception occurred earlier and * caused the transaction to be invalidated. 
* * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed, or cursors associated with the transaction are still open. */ public synchronized void commit() throws DatabaseException { try { checkEnv(); checkOpen(); env.removeReferringHandle(this); txn.commit(); commitToken = txn.getCommitToken(); /* Remove reference to internal txn, so we can reclaim memory. */ setTxnNull(); } catch (Error E) { DbInternal.getNonNullEnvImpl(env).invalidate(E); throw E; } } /** * End the transaction using the specified durability requirements. This * requirement overrides any default durability requirements associated * with the environment. If the durability requirements cannot be satisfied, * an exception is thrown to describe the problem. Please see * {@link Durability} for specific exceptions that could result when the * durability requirements cannot be satisfied. * * <p>All cursors opened within the transaction must be closed before the * transaction is committed.</p> * * <p>If the method encounters an error, the transaction <!-- and all child * transactions of the transaction --> will have been aborted when the call * returns.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method may be called any number of times * to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. 
The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @param durability the durability requirements for this transaction * * @throws com.sleepycat.je.rep.InsufficientReplicasException if the master * in a replicated environment could not contact enough replicas to * initiate the commit. * * @throws com.sleepycat.je.rep.InsufficientAcksException if the master in * a replicated environment did not receive enough replica acknowledgments, * althought the commit succeeded locally. * * @throws com.sleepycat.je.rep.ReplicaWriteException if a write operation * was performed with this transaction, but this node is now a Replica. * * @throws OperationFailureException if this exception occurred earlier and * caused the transaction to be invalidated. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed, or cursors associated with the transaction are still open. * * @throws IllegalArgumentException if an invalid parameter is specified. */ public synchronized void commit(Durability durability) throws DatabaseException { doCommit(durability); } /** * End the transaction, writing to stable storage and committing * synchronously. This means the transaction will exhibit all of the ACID * (atomicity, consistency, isolation, and durability) properties. * * <p>This behavior is the default for database environments unless * otherwise configured using the {@link * com.sleepycat.je.EnvironmentConfig#setDurability} * EnvironmentConfig.setTxnNoSync} method. This behavior may also be set * for a single transaction using the {@link * com.sleepycat.je.Environment#beginTransaction * Environment.beginTransaction} method. 
Any value specified to this * method overrides both of those settings.</p> * * <p>All cursors opened within the transaction must be closed before the * transaction is committed.</p> * * <p>If the method encounters an error, the transaction <!-- and all child * transactions of the transaction --> will have been aborted when the call * returns.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method may be called any number of times * to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @throws com.sleepycat.je.rep.InsufficientReplicasException if the master * in a replicated environment could not contact enough replicas to * initiate the commit. * * @throws com.sleepycat.je.rep.InsufficientAcksException if the master in * a replicated environment did not receive enough replica acknowledgments, * althought the commit succeeded locally. * * @throws com.sleepycat.je.rep.ReplicaWriteException if a write operation * was performed with this transaction, but this node is now a Replica. * * @throws OperationFailureException if this exception occurred earlier and * caused the transaction to be invalidated. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed, or cursors associated with the transaction are still open. 
*/ public synchronized void commitSync() throws DatabaseException { doCommit(Durability.COMMIT_SYNC); } /** * End the transaction, not writing to stable storage and not committing * synchronously. This means the transaction will exhibit the ACI * (atomicity, consistency, and isolation) properties, but not D * (durability); that is, database integrity will be maintained, but it is * possible this transaction may be undone during recovery. * * <p>This behavior may be set for a database environment using the {@link * com.sleepycat.je.EnvironmentConfig#setDurability} * EnvironmentConfig.setTxnNoSync} method or for a single transaction using * the {@link com.sleepycat.je.Environment#beginTransaction * Environment.beginTransaction} method. Any value specified to this * method overrides both of those settings.</p> * * <p>All cursors opened within the transaction must be closed before the * transaction is committed.</p> * * <p>If the method encounters an error, the transaction <!-- and all child * transactions of the transaction --> will have been aborted when the call * returns.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method may be called any number of times * to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @throws com.sleepycat.je.rep.InsufficientReplicasException if the master * in a replicated environment could not contact enough replicas to * initiate the commit. 
* * @throws com.sleepycat.je.rep.InsufficientAcksException if the master in * a replicated environment did not receive enough replica acknowledgments, * althought the commit succeeded locally. * * @throws com.sleepycat.je.rep.ReplicaWriteException if a write operation * was performed with this transaction, but this node is now a Replica. * * @throws OperationFailureException if this exception occurred earlier and * caused the transaction to be invalidated. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed, or cursors associated with the transaction are still open. */ public synchronized void commitNoSync() throws DatabaseException { doCommit(Durability.COMMIT_NO_SYNC); } /** * End the transaction, writing to stable storage but not committing * synchronously. This means the transaction will exhibit the ACI * (atomicity, consistency, and isolation) properties, but not D * (durability); that is, database integrity will be maintained, but it is * possible this transaction may be undone during recovery. * * <p>This behavior is the default for database environments unless * otherwise configured using the {@link * com.sleepycat.je.EnvironmentConfig#setDurability * EnvironmentConfig.setDurability} method. This behavior may also be set * for a single transaction using the {@link * com.sleepycat.je.Environment#beginTransaction * Environment.beginTransaction} method. 
Any value specified to this * method overrides both of those settings.</p> * * <p>All cursors opened within the transaction must be closed before the * transaction is committed.</p> * * <p>If the method encounters an error, the transaction <!-- and all child * transactions of the transaction --> will have been aborted when the call * returns.</p> * * <p>After this method has been called, regardless of its return, the * {@link Transaction} handle may not be accessed again, with one * exception: the {@code abort} method may be called any number of times * to simplify error handling.</p> * * <p>WARNING: To guard against memory leaks, the application should * discard all references to the closed handle. While BDB makes an effort * to discard references from closed objects to the allocated memory for an * environment, this behavior is not guaranteed. The safe course of action * for an application is to discard all references to closed BDB * objects.</p> * * @throws com.sleepycat.je.rep.InsufficientReplicasException if the master * in a replicated environment could not contact enough replicas to * initiate the commit. * * @throws com.sleepycat.je.rep.InsufficientAcksException if the master in * a replicated environment did not receive enough replica acknowledgments, * althought the commit succeeded locally. * * @throws com.sleepycat.je.rep.ReplicaWriteException if a write operation * was performed with this transaction, but this node is now a Replica. * * @throws OperationFailureException if this exception occurred earlier and * caused the transaction to be invalidated. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed, or cursors associated with the transaction are still open. 
*/ public synchronized void commitWriteNoSync() throws DatabaseException { doCommit(Durability.COMMIT_WRITE_NO_SYNC); } /** * Perform error checking and invoke the commit on Txn. * * @param durability the durability to use for the commit * * @throws IllegalArgumentException via commit(Durability) */ private void doCommit(Durability durability) { try { checkEnv(); checkOpen(); env.removeReferringHandle(this); txn.commit(durability); commitToken = txn.getCommitToken(); /* Remove reference to internal txn, so we can reclaim memory. */ setTxnNull(); } catch (Error E) { DbInternal.getNonNullEnvImpl(env).invalidate(E); throw E; } } /** * Returns the timeout value for the transaction lifetime. * * <p>If {@link #setTxnTimeout(long,TimeUnit)} has been called to configure * the timeout, that value is returned. Otherwise, if {@link * TransactionConfig#setTxnTimeout} was called on the {@code * TransactionConfig} specified when creating the transaction, then that * value is returned. Otherwise, the environment configuration value * ({@link EnvironmentConfig#TXN_TIMEOUT}) is returned.</p> * * @param unit the {@code TimeUnit} of the returned value. May not be null. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed. * * @throws IllegalArgumentException if the unit is null. * * @since 4.0 */ public long getTxnTimeout(TimeUnit unit) throws EnvironmentFailureException, IllegalStateException, IllegalArgumentException { checkEnv(); checkOpen(); return PropUtil.millisToDuration((int) txn.getTxnTimeout(), unit); } /** * Configures the timeout value for the transaction lifetime. * * <p>If the transaction runs longer than this time, an operation using the * transaction may throw {@link TransactionTimeoutException}. 
The * transaction timeout is checked when locking a record, as part of a read * or write operation.</p> * * <p>A value of zero (which is the default) disables timeouts for the * transaction, meaning that no limit on the duration of the transaction is * enforced. Note that the {@link #setLockTimeout(long, TimeUnit)} lock * timeout} is independent of the transaction timeout, and the lock timeout * should not normally be set to zero.</p> * * @param timeOut The timeout value for the transaction lifetime, or zero * to disable transaction timeouts. * * @param unit the {@code TimeUnit} of the timeOut value. May be null only * if timeOut is zero. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed. * * @throws IllegalArgumentException if timeOut or unit is invalid. * * @since 4.0 */ public void setTxnTimeout(long timeOut, TimeUnit unit) throws IllegalArgumentException, DatabaseException { checkEnv(); checkOpen(); txn.setTxnTimeout(PropUtil.durationToMillis(timeOut, unit)); } /** * Returns the lock request timeout value for the transaction. * * <p>If {@link #setLockTimeout(long,TimeUnit)} has not been called to * configure the timeout, the environment configuration value ({@link * EnvironmentConfig#LOCK_TIMEOUT}) is returned.</p> * * @param unit the {@code TimeUnit} of the returned value. May not be null. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed. * * @throws IllegalArgumentException if the unit is null. 
* * @since 4.0 */ public long getLockTimeout(TimeUnit unit) throws EnvironmentFailureException, IllegalStateException, IllegalArgumentException { checkEnv(); checkOpen(); return PropUtil.millisToDuration((int) txn.getLockTimeout(), unit); } /** * Configures the lock request timeout value for the transaction. This * overrides the {@link EnvironmentConfig#setLockTimeout(long, TimeUnit) * default lock timeout}. * * <p>A value of zero disables lock timeouts. This is not recommended, even * when the application expects that deadlocks will not occur or will be * easily resolved. A lock timeout is a fall-back that guards against * unexpected "live lock", unresponsive threads, or application failure to * close a cursor or to commit or abort a transaction.</p> * * @param timeOut The lock timeout for all transactional and * non-transactional operations, or zero to disable lock timeouts. * * @param unit the {@code TimeUnit} of the timeOut value. May be null only * if timeOut is zero. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * * @throws IllegalStateException if the transaction or environment has been * closed. * * @throws IllegalArgumentException if timeOut or unit is invalid. * * @since 4.0 */ public void setLockTimeout(long timeOut, TimeUnit unit) throws IllegalArgumentException, DatabaseException { checkEnv(); checkOpen(); txn.setLockTimeout(PropUtil.durationToMillis(timeOut, unit)); } /** * Set the user visible name for the transaction. * * @param name The user visible name for the transaction. */ public void setName(String name) { this.name = name; } /** * Get the user visible name for the transaction. * * @return The user visible name for the transaction. */ public String getName() { return name; } /** * For internal use. * @hidden */ @Override public int hashCode() { return (int) id; } /** * For internal use. 
* @hidden */ @Override public boolean equals(Object o) { if (o == null) { return false; } if (!(o instanceof Transaction)) { return false; } if (((Transaction) o).id == id) { return true; } return false; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("<Transaction id=\""); sb.append(id).append("\""); if (name != null) { sb.append(" name=\""); sb.append(name).append("\""); } sb.append(">"); return sb.toString(); } /** * This method should only be called by the LockerFactory.getReadableLocker * and getWritableLocker methods. The locker returned does not enforce the * readCommitted isolation setting. * * @throws IllegalArgumentException via all API methods with a txn param */ Locker getLocker() throws DatabaseException { if (txn == null) { throw new IllegalArgumentException ("Transaction " + id + " has been closed and is no longer usable."); } return txn; } /* * Helpers */ Txn getTxn() { return txn; } public Environment getEnvironment() { return env; } /** * @throws EnvironmentFailureException if the underlying environment is * invalid, via all methods. * * @throws IllegalStateException via all methods. */ private void checkEnv() { EnvironmentImpl envImpl = env.getNonNullEnvImpl(); if (envImpl == null) { throw new IllegalStateException ("The environment has been closed. " + "This transaction is no longer usable."); } envImpl.checkIfInvalid(); } /** * @throws IllegalStateException via all methods except abort. */ void checkOpen() { if (txn == null || txn.isClosed()) { throw new IllegalStateException("Transaction Id " + id + " has been closed."); } } /** * Returns whether this {@code Transaction} is open, which is equivalent * to when {@link Transaction#getState} returns {@link * Transaction.State#OPEN}. See {@link Transaction.State#OPEN} for more * information. 
* * <p>When an {@link OperationFailureException}, or one of its subclasses, * is caught, the {@code isValid} method may be called to determine whether * the {@code Transaction} can continue to be used, or should be * aborted.</p> */ public boolean isValid() { return txn != null && txn.isValid(); } /** * Remove reference to internal txn, so we can reclaim memory. Before * setting it null, save the final State value, so we can return it from * getState. */ private void setTxnNull() { finalState = txn.getState(); txn = null; } /** * Returns the current state of the transaction. * * @since 5.0.48 */ public State getState() { if (txn != null) { assert finalState == null; return txn.getState(); } else { assert finalState != null; return finalState; } } /** * Nulls-out indirect references to the environment, to allow GC. * <p> * The app may hold the Transaction references longer than expected. In * particular during an Environment re-open we need to give GC a fighting * chance while handles from two environments are temporarily referenced. * <p> * Note that this is needed even when the txn or env is invalid. */ synchronized void minimalClose() { if (txn != null) { setTxnNull(); } } }
googleapis/google-api-java-client-services
35,104
clients/google-api-services-chromewebstore/v1.1/2.0.0/com/google/api/services/chromewebstore/v1_1/Chromewebstore.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.chromewebstore.v1_1; /** * Service definition for Chromewebstore (v1.1). * * <p> * The Chrome Web Store API provides access to data about apps and extensions, as well as developer tools for managing them. * </p> * * <p> * For more information about this service, see the * <a href="https://developer.chrome.com/webstore/api_index" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link ChromewebstoreRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class Chromewebstore extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 || (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 && com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) || com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2, "You are currently running with version %s of google-api-client. 
" + "You need at least version 1.31.1 of google-api-client to run version " + "2.0.0 of the Chrome Web Store API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://chromewebstore.googleapis.com/"; /** * The default encoded mTLS root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.31 */ public static final String DEFAULT_MTLS_ROOT_URL = "https://chromewebstore.mtls.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. 
* </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Chromewebstore(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ Chromewebstore(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Items collection. * * <p>The typical use is:</p> * <pre> * {@code Chromewebstore chromewebstore = new Chromewebstore(...);} * {@code Chromewebstore.Items.List request = chromewebstore.items().list(parameters ...)} * </pre> * * @return the resource collection */ public Items items() { return new Items(); } /** * The "items" collection of methods. */ public class Items { /** * Gets your own Chrome Web Store item. * * Create a request for the method "items.get". * * This request holds the parameters needed by the chromewebstore server. 
After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. * * @param itemId Unique identifier representing the Chrome App, Chrome Extension, or the Chrome Theme. * @return the request */ public Get get(java.lang.String itemId) throws java.io.IOException { Get result = new Get(itemId); initialize(result); return result; } public class Get extends ChromewebstoreRequest<com.google.api.services.chromewebstore.v1_1.model.Item> { private static final String REST_PATH = "chromewebstore/v1.1/items/{itemId}"; /** * Gets your own Chrome Web Store item. * * Create a request for the method "items.get". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param itemId Unique identifier representing the Chrome App, Chrome Extension, or the Chrome Theme. 
* @since 1.13 */ protected Get(java.lang.String itemId) { super(Chromewebstore.this, "GET", REST_PATH, null, com.google.api.services.chromewebstore.v1_1.model.Item.class); this.itemId = com.google.api.client.util.Preconditions.checkNotNull(itemId, "Required parameter itemId must be specified."); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); } @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); } @Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); } @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); } @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); } @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); } @Override public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); } @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); } @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) super.setQuotaUser(quotaUser); } @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); } @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) super.setUploadProtocol(uploadProtocol); } /** Unique identifier representing the Chrome App, Chrome Extension, or the Chrome Theme. 
*/ @com.google.api.client.util.Key private java.lang.String itemId; /** Unique identifier representing the Chrome App, Chrome Extension, or the Chrome Theme. */ public java.lang.String getItemId() { return itemId; } /** Unique identifier representing the Chrome App, Chrome Extension, or the Chrome Theme. */ public Get setItemId(java.lang.String itemId) { this.itemId = itemId; return this; } /** Determines which subset of the item information to return. */ @com.google.api.client.util.Key private java.lang.String projection; /** Determines which subset of the item information to return. */ public java.lang.String getProjection() { return projection; } /** Determines which subset of the item information to return. */ public Get setProjection(java.lang.String projection) { this.projection = projection; return this; } @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); } } /** * Inserts a new item. * * Create a request for the method "items.insert". * * This request holds the parameters needed by the chromewebstore server. After setting any * optional parameters, call the {@link Insert#execute()} method to invoke the remote operation. * * @return the request */ public Insert insert() throws java.io.IOException { Insert result = new Insert(); initialize(result); return result; } /** * Inserts a new item. * * Create a request for the method "items.insert". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Insert#execute()} method to invoke the remote operation. * * <p> * This method should be used for uploading media content. * </p> * * * @param mediaContent The media HTTP content. 
* @return the request * @throws java.io.IOException if the initialization of the request fails */ public Insert insert(com.google.api.client.http.AbstractInputStreamContent mediaContent) throws java.io.IOException { Insert result = new Insert(mediaContent); initialize(result); return result; } public class Insert extends ChromewebstoreRequest<com.google.api.services.chromewebstore.v1_1.model.Item> { private static final String REST_PATH = "chromewebstore/v1.1/items"; /** * Inserts a new item. * * Create a request for the method "items.insert". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Insert#execute()} method to invoke the remote operation. * <p> {@link * Insert#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @since 1.13 */ protected Insert() { super(Chromewebstore.this, "POST", REST_PATH, null, com.google.api.services.chromewebstore.v1_1.model.Item.class); } /** * Inserts a new item. * * Create a request for the method "items.insert". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Insert#execute()} method to invoke the remote operation. * <p> {@link * Insert#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * <p> * This constructor should be used for uploading media content. * </p> * * * @param mediaContent The media HTTP content. 
* @since 1.13 */ protected Insert(com.google.api.client.http.AbstractInputStreamContent mediaContent) { super(Chromewebstore.this, "POST", "/upload/" + getServicePath() + REST_PATH, null, com.google.api.services.chromewebstore.v1_1.model.Item.class); com.google.api.client.util.Preconditions.checkNotNull(mediaContent, "Required parameter mediaContent must be specified."); initializeMediaUpload(mediaContent); } @Override public Insert set$Xgafv(java.lang.String $Xgafv) { return (Insert) super.set$Xgafv($Xgafv); } @Override public Insert setAccessToken(java.lang.String accessToken) { return (Insert) super.setAccessToken(accessToken); } @Override public Insert setAlt(java.lang.String alt) { return (Insert) super.setAlt(alt); } @Override public Insert setCallback(java.lang.String callback) { return (Insert) super.setCallback(callback); } @Override public Insert setFields(java.lang.String fields) { return (Insert) super.setFields(fields); } @Override public Insert setKey(java.lang.String key) { return (Insert) super.setKey(key); } @Override public Insert setOauthToken(java.lang.String oauthToken) { return (Insert) super.setOauthToken(oauthToken); } @Override public Insert setPrettyPrint(java.lang.Boolean prettyPrint) { return (Insert) super.setPrettyPrint(prettyPrint); } @Override public Insert setQuotaUser(java.lang.String quotaUser) { return (Insert) super.setQuotaUser(quotaUser); } @Override public Insert setUploadType(java.lang.String uploadType) { return (Insert) super.setUploadType(uploadType); } @Override public Insert setUploadProtocol(java.lang.String uploadProtocol) { return (Insert) super.setUploadProtocol(uploadProtocol); } /** The email of the publisher who owns the items. Defaults to the caller's email address. */ @com.google.api.client.util.Key private java.lang.String publisherEmail; /** The email of the publisher who owns the items. Defaults to the caller's email address. 
*/ public java.lang.String getPublisherEmail() { return publisherEmail; } /** The email of the publisher who owns the items. Defaults to the caller's email address. */ public Insert setPublisherEmail(java.lang.String publisherEmail) { this.publisherEmail = publisherEmail; return this; } @Override public Insert set(String parameterName, Object value) { return (Insert) super.set(parameterName, value); } } /** * Publishes an item. * * Create a request for the method "items.publish". * * This request holds the parameters needed by the chromewebstore server. After setting any * optional parameters, call the {@link Publish#execute()} method to invoke the remote operation. * * @param itemId The ID of the item to publish. * @param content the {@link com.google.api.services.chromewebstore.v1_1.model.PublishRequest} * @return the request */ public Publish publish(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.PublishRequest content) throws java.io.IOException { Publish result = new Publish(itemId, content); initialize(result); return result; } public class Publish extends ChromewebstoreRequest<com.google.api.services.chromewebstore.v1_1.model.Item2> { private static final String REST_PATH = "chromewebstore/v1.1/items/{itemId}/publish"; /** * Publishes an item. * * Create a request for the method "items.publish". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Publish#execute()} method to invoke the remote operation. * <p> {@link * Publish#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param itemId The ID of the item to publish. 
* @param content the {@link com.google.api.services.chromewebstore.v1_1.model.PublishRequest} * @since 1.13 */ protected Publish(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.PublishRequest content) { super(Chromewebstore.this, "POST", REST_PATH, content, com.google.api.services.chromewebstore.v1_1.model.Item2.class); this.itemId = com.google.api.client.util.Preconditions.checkNotNull(itemId, "Required parameter itemId must be specified."); } @Override public Publish set$Xgafv(java.lang.String $Xgafv) { return (Publish) super.set$Xgafv($Xgafv); } @Override public Publish setAccessToken(java.lang.String accessToken) { return (Publish) super.setAccessToken(accessToken); } @Override public Publish setAlt(java.lang.String alt) { return (Publish) super.setAlt(alt); } @Override public Publish setCallback(java.lang.String callback) { return (Publish) super.setCallback(callback); } @Override public Publish setFields(java.lang.String fields) { return (Publish) super.setFields(fields); } @Override public Publish setKey(java.lang.String key) { return (Publish) super.setKey(key); } @Override public Publish setOauthToken(java.lang.String oauthToken) { return (Publish) super.setOauthToken(oauthToken); } @Override public Publish setPrettyPrint(java.lang.Boolean prettyPrint) { return (Publish) super.setPrettyPrint(prettyPrint); } @Override public Publish setQuotaUser(java.lang.String quotaUser) { return (Publish) super.setQuotaUser(quotaUser); } @Override public Publish setUploadType(java.lang.String uploadType) { return (Publish) super.setUploadType(uploadType); } @Override public Publish setUploadProtocol(java.lang.String uploadProtocol) { return (Publish) super.setUploadProtocol(uploadProtocol); } /** The ID of the item to publish. */ @com.google.api.client.util.Key private java.lang.String itemId; /** The ID of the item to publish. */ public java.lang.String getItemId() { return itemId; } /** The ID of the item to publish. 
*/ public Publish setItemId(java.lang.String itemId) { this.itemId = itemId; return this; } /** * The deploy percentage you want to set for your item. Valid values are [0, 100]. If set to * any number less than 100, only that many percentage of users will be allowed to get the * update. */ @com.google.api.client.util.Key private java.lang.Integer deployPercentage; /** The deploy percentage you want to set for your item. Valid values are [0, 100]. If set to any number less than 100, only that many percentage of users will be allowed to get the update. */ public java.lang.Integer getDeployPercentage() { return deployPercentage; } /** * The deploy percentage you want to set for your item. Valid values are [0, 100]. If set to * any number less than 100, only that many percentage of users will be allowed to get the * update. */ public Publish setDeployPercentage(java.lang.Integer deployPercentage) { this.deployPercentage = deployPercentage; return this; } /** * Provide defined publishTarget in URL (case sensitive): publishTarget="trustedTesters" or * publishTarget="default". Defaults to publishTarget="default". */ @com.google.api.client.util.Key private java.lang.String publishTarget; /** Provide defined publishTarget in URL (case sensitive): publishTarget="trustedTesters" or publishTarget="default". Defaults to publishTarget="default". */ public java.lang.String getPublishTarget() { return publishTarget; } /** * Provide defined publishTarget in URL (case sensitive): publishTarget="trustedTesters" or * publishTarget="default". Defaults to publishTarget="default". */ public Publish setPublishTarget(java.lang.String publishTarget) { this.publishTarget = publishTarget; return this; } /** * Optional. The caller request to exempt the review and directly publish because the update * is within the list that we can automatically validate. The API will check if the exemption * can be granted using real time data. 
*/ @com.google.api.client.util.Key private java.lang.Boolean reviewExemption; /** Optional. The caller request to exempt the review and directly publish because the update is within the list that we can automatically validate. The API will check if the exemption can be granted using real time data. */ public java.lang.Boolean getReviewExemption() { return reviewExemption; } /** * Optional. The caller request to exempt the review and directly publish because the update * is within the list that we can automatically validate. The API will check if the exemption * can be granted using real time data. */ public Publish setReviewExemption(java.lang.Boolean reviewExemption) { this.reviewExemption = reviewExemption; return this; } @Override public Publish set(String parameterName, Object value) { return (Publish) super.set(parameterName, value); } } /** * Updates an existing item. * * Create a request for the method "items.update". * * This request holds the parameters needed by the chromewebstore server. After setting any * optional parameters, call the {@link Update#execute()} method to invoke the remote operation. * * @param itemId The ID of the item to upload. * @param content the {@link com.google.api.services.chromewebstore.v1_1.model.Item} * @return the request */ public Update update(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.Item content) throws java.io.IOException { Update result = new Update(itemId, content); initialize(result); return result; } /** * Updates an existing item. * * Create a request for the method "items.update". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Update#execute()} method to invoke the remote operation. * * <p> * This method should be used for uploading media content. * </p> * * @param itemId The ID of the item to upload. 
* @param content the {@link com.google.api.services.chromewebstore.v1_1.model.Item} media metadata or {@code null} if none * @param mediaContent The media HTTP content. * @return the request * @throws java.io.IOException if the initialization of the request fails */ public Update update(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.Item content, com.google.api.client.http.AbstractInputStreamContent mediaContent) throws java.io.IOException { Update result = new Update(itemId, content, mediaContent); initialize(result); return result; } public class Update extends ChromewebstoreRequest<com.google.api.services.chromewebstore.v1_1.model.Item> { private static final String REST_PATH = "chromewebstore/v1.1/items/{itemId}"; /** * Updates an existing item. * * Create a request for the method "items.update". * * This request holds the parameters needed by the the chromewebstore server. After setting any * optional parameters, call the {@link Update#execute()} method to invoke the remote operation. * <p> {@link * Update#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param itemId The ID of the item to upload. * @param content the {@link com.google.api.services.chromewebstore.v1_1.model.Item} * @since 1.13 */ protected Update(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.Item content) { super(Chromewebstore.this, "PUT", REST_PATH, content, com.google.api.services.chromewebstore.v1_1.model.Item.class); this.itemId = com.google.api.client.util.Preconditions.checkNotNull(itemId, "Required parameter itemId must be specified."); } /** * Updates an existing item. * * Create a request for the method "items.update". * * This request holds the parameters needed by the the chromewebstore server. 
After setting any * optional parameters, call the {@link Update#execute()} method to invoke the remote operation. * <p> {@link * Update#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * <p> * This constructor should be used for uploading media content. * </p> * * @param itemId The ID of the item to upload. * @param content the {@link com.google.api.services.chromewebstore.v1_1.model.Item} media metadata or {@code null} if none * @param mediaContent The media HTTP content. * @since 1.13 */ protected Update(java.lang.String itemId, com.google.api.services.chromewebstore.v1_1.model.Item content, com.google.api.client.http.AbstractInputStreamContent mediaContent) { super(Chromewebstore.this, "PUT", "/upload/" + getServicePath() + REST_PATH, content, com.google.api.services.chromewebstore.v1_1.model.Item.class); this.itemId = com.google.api.client.util.Preconditions.checkNotNull(itemId, "Required parameter itemId must be specified."); com.google.api.client.util.Preconditions.checkNotNull(mediaContent, "Required parameter mediaContent must be specified."); initializeMediaUpload(mediaContent); } @Override public Update set$Xgafv(java.lang.String $Xgafv) { return (Update) super.set$Xgafv($Xgafv); } @Override public Update setAccessToken(java.lang.String accessToken) { return (Update) super.setAccessToken(accessToken); } @Override public Update setAlt(java.lang.String alt) { return (Update) super.setAlt(alt); } @Override public Update setCallback(java.lang.String callback) { return (Update) super.setCallback(callback); } @Override public Update setFields(java.lang.String fields) { return (Update) super.setFields(fields); } @Override public Update setKey(java.lang.String key) { return (Update) super.setKey(key); } @Override public Update setOauthToken(java.lang.String oauthToken) { return (Update) super.setOauthToken(oauthToken); } @Override 
public Update setPrettyPrint(java.lang.Boolean prettyPrint) { return (Update) super.setPrettyPrint(prettyPrint); } @Override public Update setQuotaUser(java.lang.String quotaUser) { return (Update) super.setQuotaUser(quotaUser); } @Override public Update setUploadType(java.lang.String uploadType) { return (Update) super.setUploadType(uploadType); } @Override public Update setUploadProtocol(java.lang.String uploadProtocol) { return (Update) super.setUploadProtocol(uploadProtocol); } /** The ID of the item to upload. */ @com.google.api.client.util.Key private java.lang.String itemId; /** The ID of the item to upload. */ public java.lang.String getItemId() { return itemId; } /** The ID of the item to upload. */ public Update setItemId(java.lang.String itemId) { this.itemId = itemId; return this; } @Override public Update set(String parameterName, Object value) { return (Update) super.set(parameterName, value); } } } /** * Builder for {@link Chromewebstore}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) { // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint. // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS. // Use the regular endpoint for all other cases. String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT"); useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint; if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) { return DEFAULT_MTLS_ROOT_URL; } return DEFAULT_ROOT_URL; } /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, Builder.chooseEndpoint(transport), DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link Chromewebstore}. 
*/ @Override public Chromewebstore build() { return new Chromewebstore(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link ChromewebstoreRequestInitializer}. * * @since 1.12 */ public Builder setChromewebstoreRequestInitializer( ChromewebstoreRequestInitializer chromewebstoreRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(chromewebstoreRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } @Override public Builder setUniverseDomain(String universeDomain) { return (Builder) super.setUniverseDomain(universeDomain); } } }
googleapis/google-cloud-java
34,980
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/Handshake.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1/accountservices.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1; /** * * * <pre> * The current status of establishing of the service. * (for example, pending approval or approved). * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.Handshake} */ public final class Handshake extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.Handshake) HandshakeOrBuilder { private static final long serialVersionUID = 0L; // Use Handshake.newBuilder() to construct. 
private Handshake(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Handshake() { approvalState_ = 0; actor_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Handshake(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.AccountServicesProto .internal_static_google_shopping_merchant_accounts_v1_Handshake_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.AccountServicesProto .internal_static_google_shopping_merchant_accounts_v1_Handshake_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.Handshake.class, com.google.shopping.merchant.accounts.v1.Handshake.Builder.class); } /** * * * <pre> * The approal state of a handshake. * </pre> * * Protobuf enum {@code google.shopping.merchant.accounts.v1.Handshake.ApprovalState} */ public enum ApprovalState implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unspecified approval status. * </pre> * * <code>APPROVAL_STATE_UNSPECIFIED = 0;</code> */ APPROVAL_STATE_UNSPECIFIED(0), /** * * * <pre> * The service was proposed and is waiting to be confirmed. * </pre> * * <code>PENDING = 1;</code> */ PENDING(1), /** * * * <pre> * Both parties have confirmed the service. * </pre> * * <code>ESTABLISHED = 2;</code> */ ESTABLISHED(2), /** * * * <pre> * The service proposal was rejected. * </pre> * * <code>REJECTED = 3;</code> */ REJECTED(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Unspecified approval status. * </pre> * * <code>APPROVAL_STATE_UNSPECIFIED = 0;</code> */ public static final int APPROVAL_STATE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The service was proposed and is waiting to be confirmed. 
* </pre> * * <code>PENDING = 1;</code> */ public static final int PENDING_VALUE = 1; /** * * * <pre> * Both parties have confirmed the service. * </pre> * * <code>ESTABLISHED = 2;</code> */ public static final int ESTABLISHED_VALUE = 2; /** * * * <pre> * The service proposal was rejected. * </pre> * * <code>REJECTED = 3;</code> */ public static final int REJECTED_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static ApprovalState valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static ApprovalState forNumber(int value) { switch (value) { case 0: return APPROVAL_STATE_UNSPECIFIED; case 1: return PENDING; case 2: return ESTABLISHED; case 3: return REJECTED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<ApprovalState> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<ApprovalState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<ApprovalState>() { public ApprovalState findValueByNumber(int number) { return ApprovalState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final 
com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.Handshake.getDescriptor() .getEnumTypes() .get(0); } private static final ApprovalState[] VALUES = values(); public static ApprovalState valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private ApprovalState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.shopping.merchant.accounts.v1.Handshake.ApprovalState) } /** * * * <pre> * The various actors that can be involved in a handshake. * </pre> * * Protobuf enum {@code google.shopping.merchant.accounts.v1.Handshake.Actor} */ public enum Actor implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unspecified actor. * </pre> * * <code>ACTOR_UNSPECIFIED = 0;</code> */ ACTOR_UNSPECIFIED(0), /** * * * <pre> * The last change was done by the account who has this service. * </pre> * * <code>ACCOUNT = 1;</code> */ ACCOUNT(1), /** * * * <pre> * The last change was done by the other party who this service points to. * </pre> * * <code>OTHER_PARTY = 2;</code> */ OTHER_PARTY(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Unspecified actor. * </pre> * * <code>ACTOR_UNSPECIFIED = 0;</code> */ public static final int ACTOR_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The last change was done by the account who has this service. * </pre> * * <code>ACCOUNT = 1;</code> */ public static final int ACCOUNT_VALUE = 1; /** * * * <pre> * The last change was done by the other party who this service points to. 
* </pre> * * <code>OTHER_PARTY = 2;</code> */ public static final int OTHER_PARTY_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Actor valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static Actor forNumber(int value) { switch (value) { case 0: return ACTOR_UNSPECIFIED; case 1: return ACCOUNT; case 2: return OTHER_PARTY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Actor> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Actor> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Actor>() { public Actor findValueByNumber(int number) { return Actor.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.Handshake.getDescriptor() .getEnumTypes() .get(1); } private static final Actor[] VALUES = values(); public static Actor valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new 
java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Actor(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.shopping.merchant.accounts.v1.Handshake.Actor) } public static final int APPROVAL_STATE_FIELD_NUMBER = 1; private int approvalState_ = 0; /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for approvalState. */ @java.lang.Override public int getApprovalStateValue() { return approvalState_; } /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The approvalState. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState getApprovalState() { com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState result = com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState.forNumber(approvalState_); return result == null ? com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState.UNRECOGNIZED : result; } public static final int ACTOR_FIELD_NUMBER = 2; private int actor_ = 0; /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for actor. */ @java.lang.Override public int getActorValue() { return actor_; } /** * * * <pre> * Output only. 
The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The actor. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake.Actor getActor() { com.google.shopping.merchant.accounts.v1.Handshake.Actor result = com.google.shopping.merchant.accounts.v1.Handshake.Actor.forNumber(actor_); return result == null ? com.google.shopping.merchant.accounts.v1.Handshake.Actor.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (approvalState_ != com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState .APPROVAL_STATE_UNSPECIFIED .getNumber()) { output.writeEnum(1, approvalState_); } if (actor_ != com.google.shopping.merchant.accounts.v1.Handshake.Actor.ACTOR_UNSPECIFIED.getNumber()) { output.writeEnum(2, actor_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (approvalState_ != com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState .APPROVAL_STATE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, approvalState_); } if (actor_ != com.google.shopping.merchant.accounts.v1.Handshake.Actor.ACTOR_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, actor_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { 
return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1.Handshake)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1.Handshake other = (com.google.shopping.merchant.accounts.v1.Handshake) obj; if (approvalState_ != other.approvalState_) return false; if (actor_ != other.actor_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + APPROVAL_STATE_FIELD_NUMBER; hash = (53 * hash) + approvalState_; hash = (37 * hash) + ACTOR_FIELD_NUMBER; hash = (53 * hash) + actor_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.shopping.merchant.accounts.v1.Handshake parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.Handshake parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.Handshake parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1.Handshake parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.shopping.merchant.accounts.v1.Handshake prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The current status of establishing of the service. * (for example, pending approval or approved). * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.Handshake} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.Handshake) com.google.shopping.merchant.accounts.v1.HandshakeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1.AccountServicesProto .internal_static_google_shopping_merchant_accounts_v1_Handshake_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1.AccountServicesProto .internal_static_google_shopping_merchant_accounts_v1_Handshake_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1.Handshake.class, com.google.shopping.merchant.accounts.v1.Handshake.Builder.class); } // Construct using com.google.shopping.merchant.accounts.v1.Handshake.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; approvalState_ = 0; actor_ = 0; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1.AccountServicesProto .internal_static_google_shopping_merchant_accounts_v1_Handshake_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1.Handshake.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake build() { com.google.shopping.merchant.accounts.v1.Handshake result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake buildPartial() { com.google.shopping.merchant.accounts.v1.Handshake result = new com.google.shopping.merchant.accounts.v1.Handshake(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.merchant.accounts.v1.Handshake result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.approvalState_ = approvalState_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.actor_ = actor_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1.Handshake) { return mergeFrom((com.google.shopping.merchant.accounts.v1.Handshake) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.Handshake other) { if (other == com.google.shopping.merchant.accounts.v1.Handshake.getDefaultInstance()) return this; if (other.approvalState_ != 0) { setApprovalStateValue(other.getApprovalStateValue()); } if (other.actor_ != 0) { setActorValue(other.getActorValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { approvalState_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { actor_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int approvalState_ = 0; /** * * * <pre> * Output only. The approval state of this handshake. 
* </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for approvalState. */ @java.lang.Override public int getApprovalStateValue() { return approvalState_; } /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for approvalState to set. * @return This builder for chaining. */ public Builder setApprovalStateValue(int value) { approvalState_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The approvalState. */ @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState getApprovalState() { com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState result = com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState.forNumber( approvalState_); return result == null ? com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState.UNRECOGNIZED : result; } /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The approvalState to set. * @return This builder for chaining. 
*/ public Builder setApprovalState( com.google.shopping.merchant.accounts.v1.Handshake.ApprovalState value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; approvalState_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. The approval state of this handshake. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.ApprovalState approval_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearApprovalState() { bitField0_ = (bitField0_ & ~0x00000001); approvalState_ = 0; onChanged(); return this; } private int actor_ = 0; /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for actor. */ @java.lang.Override public int getActorValue() { return actor_; } /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for actor to set. * @return This builder for chaining. */ public Builder setActorValue(int value) { actor_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The actor. 
*/ @java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake.Actor getActor() { com.google.shopping.merchant.accounts.v1.Handshake.Actor result = com.google.shopping.merchant.accounts.v1.Handshake.Actor.forNumber(actor_); return result == null ? com.google.shopping.merchant.accounts.v1.Handshake.Actor.UNRECOGNIZED : result; } /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The actor to set. * @return This builder for chaining. */ public Builder setActor(com.google.shopping.merchant.accounts.v1.Handshake.Actor value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; actor_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. The most recent account to modify the account service's * `approval_status`. * </pre> * * <code> * .google.shopping.merchant.accounts.v1.Handshake.Actor actor = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. 
*/ public Builder clearActor() { bitField0_ = (bitField0_ & ~0x00000002); actor_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.Handshake) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.Handshake) private static final com.google.shopping.merchant.accounts.v1.Handshake DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.Handshake(); } public static com.google.shopping.merchant.accounts.v1.Handshake getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Handshake> PARSER = new com.google.protobuf.AbstractParser<Handshake>() { @java.lang.Override public Handshake parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Handshake> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Handshake> getParserForType() { return PARSER; } 
@java.lang.Override public com.google.shopping.merchant.accounts.v1.Handshake getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
hibernate/hibernate-orm
31,339
hibernate-core/src/test/java/org/hibernate/orm/test/sql/hand/query/NativeSQLQueriesTest.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.orm.test.sql.hand.query; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Blob; import java.sql.Clob; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import org.hibernate.Hibernate; import org.hibernate.QueryException; import org.hibernate.Transaction; import org.hibernate.cfg.Environment; import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.MySQLDialect; import org.hibernate.orm.test.sql.hand.Dimension; import org.hibernate.orm.test.sql.hand.Employment; import org.hibernate.orm.test.sql.hand.Group; import org.hibernate.orm.test.sql.hand.ImageHolder; import org.hibernate.orm.test.sql.hand.Order; import org.hibernate.orm.test.sql.hand.Organization; import org.hibernate.orm.test.sql.hand.Person; import org.hibernate.orm.test.sql.hand.Product; import org.hibernate.orm.test.sql.hand.SpaceShip; import org.hibernate.orm.test.sql.hand.Speech; import org.hibernate.orm.test.sql.hand.TextHolder; import org.hibernate.query.NativeQuery; import org.hibernate.query.Query; import org.hibernate.transform.ResultTransformer; import org.hibernate.transform.Transformers; import org.hibernate.type.StandardBasicTypes; import org.hibernate.testing.orm.junit.JiraKey; import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.FailureExpected; import org.hibernate.testing.orm.junit.RequiresDialect; import org.hibernate.testing.orm.junit.ServiceRegistry; import org.hibernate.testing.orm.junit.SessionFactory; import org.hibernate.testing.orm.junit.SessionFactoryScope; import org.hibernate.testing.orm.junit.Setting; import org.hibernate.testing.orm.junit.SkipForDialect; import org.junit.jupiter.api.Test; import 
jakarta.persistence.PersistenceException; import static org.hibernate.testing.orm.junit.ExtraAssertions.assertClassAssignability; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; /** * Tests of various features of native SQL queries. * * @author Steve Ebersole */ @ServiceRegistry( settings = { @Setting( name = Environment.GENERATE_STATISTICS, value = "true" ) } ) @DomainModel( xmlMappings = { "org/hibernate/orm/test/sql/hand/query/NativeSQLQueries.hbm.xml" } ) @SessionFactory @SuppressWarnings({"deprecation", "unused", "rawtypes", "unchecked"}) public class NativeSQLQueriesTest { protected String getOrganizationFetchJoinEmploymentSQL() { return "SELECT org.orgid as {org.id}, " + " org.name as {org.name}, " + " emp.employer as {emp.key}, " + " emp.empid as {emp.element}, " + " {emp.element.*} " + "FROM ORGANIZATION org " + " LEFT OUTER JOIN EMPLOYMENT emp ON org.orgid = emp.employer"; } protected String getOrganizationJoinEmploymentSQL() { return "SELECT org.orgid as {org.id}, " + " org.name as {org.name}, " + " {emp.*} " + "FROM ORGANIZATION org " + " LEFT OUTER JOIN EMPLOYMENT emp ON org.orgid = emp.employer"; } protected String getEmploymentSQL() { return "SELECT * FROM EMPLOYMENT"; } protected String getEmploymentSQLMixedScalarEntity() { return "SELECT e.*, e.employer as employerid FROM EMPLOYMENT e" ; } protected String getOrgEmpRegionSQL() { return "select {org.*}, {emp.*}, emp.region_code " + "from ORGANIZATION org " + " left outer join EMPLOYMENT emp on org.orgid = emp.employer"; } protected String getOrgEmpPersonSQL() { return "select {org.*}, {emp.*}, {pers.*} " + "from ORGANIZATION org " + " join EMPLOYMENT emp on org.orgid = emp.employer " + " join 
PERSON pers on pers.perid = emp.employee "; } protected String getDescriptionsSQL() { return "select description from TEXT_HOLDER"; } protected String getPhotosSQL() { return "select photo from IMAGE_HOLDER"; } @Test @SkipForDialect( dialectClass = H2Dialect.class ) public void testFailOnNoAddEntityOrScalar(SessionFactoryScope scope) { // Note: this passes, but for the wrong reason. // there is actually an exception thrown, but it is the database // throwing a sql exception because the SQL gets passed // "un-processed"... // // Oddly, H2 accepts this query. scope.inSession( session -> { session.beginTransaction(); try { String sql = "select {org.*} " + "from organization org"; session.createNativeQuery( sql ).list(); fail( "Should throw an exception since no addEntity nor addScalar has been performed." ); } catch( PersistenceException pe) { // expected behavior } finally { session.getTransaction().rollback(); session.close(); } } ); } @Test public void testManualSynchronization(SessionFactoryScope scope) { scope.inTransaction( session -> { session.getSessionFactory().getStatistics().clear(); // create an Organization... Organization jboss = new Organization( "JBoss" ); session.persist( jboss ); // now query on Employment, this should not cause an auto-flush session.createNativeQuery( getEmploymentSQL() ).addSynchronizedQuerySpace( "ABC" ).list(); assertEquals( 0, session.getSessionFactory().getStatistics().getEntityInsertCount() ); // now try to query on Employment but this time add Organization as a synchronized query space... 
session.createNativeQuery( getEmploymentSQL() ).addSynchronizedEntityClass( Organization.class ).list(); assertEquals( 1, session.getSessionFactory().getStatistics().getEntityInsertCount() ); // clean up session.remove( jboss ); } ); } @Test public void testSQLQueryInterface(SessionFactoryScope scope) { Organization ifa = new Organization("IFA"); Organization jboss = new Organization("JBoss"); Person gavin = new Person("Gavin"); Employment emp = new Employment(gavin, jboss, "AU"); scope.inTransaction( session -> { session.persist(ifa); session.persist(jboss); session.persist(gavin); session.persist(emp); session.flush(); List l = session.createNativeQuery( getOrgEmpRegionSQL() ) .addEntity("org", Organization.class) .addJoin("emp", "org.employments") .addScalar("region_code", StandardBasicTypes.STRING ) .list(); assertEquals( 2, l.size() ); l = session.createNativeQuery( getOrgEmpPersonSQL() ) .addEntity("org", Organization.class) .addJoin("emp", "org.employments") .addJoin("pers", "emp.employee") .list(); assertEquals( 1, l.size() ); } ); scope.inTransaction( session -> { List l = session.createNativeQuery( "select {org.*}, {emp.*} " + "from ORGANIZATION org " + " left outer join EMPLOYMENT emp on org.orgid = emp.employer, ORGANIZATION org2" ) .addEntity("org", Organization.class) .addJoin("emp", "org.employments") .setResultListTransformer( list -> { List<Object> result = new ArrayList<>( list.size() ); Map<Object, Object> distinct = new IdentityHashMap<>(); for ( Object entity : list ) { if ( distinct.put( entity, entity ) == null ) { result.add( entity ); } } return result; } ) .list(); assertEquals( 2, l.size() ); } ); scope.inTransaction( session -> { session.remove(emp); session.remove(gavin); session.remove(ifa); session.remove(jboss); } ); } @Test public void testResultSetMappingDefinition(SessionFactoryScope scope) { Organization ifa = new Organization("IFA"); Organization jboss = new Organization("JBoss"); Person gavin = new Person("Gavin"); Employment 
emp = new Employment(gavin, jboss, "AU"); scope.inTransaction( session -> { session.persist(ifa); session.persist(jboss); session.persist(gavin); session.persist(emp); session.flush(); List l = session.createNativeQuery( getOrgEmpRegionSQL(), "org-emp-regionCode" ).list(); assertEquals( 2, l.size() ); l = session.createNativeQuery( getOrgEmpPersonSQL(), "org-emp-person" ).list(); assertEquals( 1, l.size() ); session.remove(emp); session.remove(gavin); session.remove(ifa); session.remove(jboss); } ); } @Test public void testResultSetMappingDefinitionWithResultClass(SessionFactoryScope scope) { Organization ifa = new Organization("IFA"); Organization jboss = new Organization("JBoss"); Person gavin = new Person("Gavin"); Employment emp = new Employment(gavin, jboss, "AU"); scope.inTransaction( session -> { session.persist(ifa); session.persist(jboss); session.persist(gavin); session.persist(emp); session.flush(); List<Object[]> l = session.createNativeQuery( getOrgEmpRegionSQL(), "org-emp-regionCode", Object[].class ).list(); assertEquals( 2, l.size() ); l = session.createNativeQuery( getOrgEmpPersonSQL(), "org-emp-person", Object[].class ).list(); assertEquals( 1, l.size() ); session.remove(emp); session.remove(gavin); session.remove(ifa); session.remove(jboss); } ); } @Test public void testScalarValues(SessionFactoryScope scope) { Organization ifa = new Organization( "IFA" ); Organization jboss = new Organization( "JBoss" ); Object idIfa = scope.fromTransaction( session -> { session.persist( ifa ); return ifa.getId(); } ); Object idJBoss = scope.fromTransaction( session -> { session.persist( jboss ); return jboss.getId(); } ); scope.inTransaction( session -> { List result = session.getNamedQuery( "orgNamesOnly" ).list(); assertTrue( result.contains( "IFA" ) ); assertTrue( result.contains( "JBoss" ) ); result = session.getNamedQuery( "orgNamesOnly" ).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP).list(); Map m = (Map) result.get(0); assertEquals( 2, 
result.size() ); assertEquals( 1, m.size() ); assertTrue( m.containsKey("name") ); } ); scope.inTransaction( session -> { Iterator iter = session.getNamedQuery( "orgNamesAndOrgs" ).list().iterator(); Object[] o = ( Object[] ) iter.next(); assertEquals( 2, o.length, "expecting 2 values" ); assertEquals( "IFA", o[0] ); assertEquals( "IFA", ( ( Organization ) o[1] ).getName() ); o = ( Object[] ) iter.next(); assertEquals( "JBoss", o[0] ); assertEquals( "JBoss", ( ( Organization ) o[1] ).getName() ); } ); scope.inTransaction( session -> { // test that the ordering of the results is truly based on the order in which they were defined Iterator iter = session.getNamedQuery( "orgsAndOrgNames" ).list().iterator(); Object[] row = ( Object[] ) iter.next(); assertEquals( 2, row.length, "expecting 2 values" ); assertEquals( Organization.class, row[0].getClass(), "expecting non-scalar result first" ); assertEquals( String.class, row[1].getClass(), "expecting scalar result second" ); assertEquals( "IFA", ( ( Organization ) row[0] ).getName() ); assertEquals( "IFA", row[1] ); row = ( Object[] ) iter.next(); assertEquals( Organization.class, row[0].getClass(), "expecting non-scalar result first" ); assertEquals( String.class, row[1].getClass(), "expecting scalar result second" ); assertEquals( "JBoss", ( ( Organization ) row[0] ).getName() ); assertEquals( "JBoss", row[1] ); assertFalse( iter.hasNext() ); } ); scope.inTransaction( session -> { Iterator iter = session.getNamedQuery( "orgIdsAndOrgNames" ).list().iterator(); Object[] o = ( Object[] ) iter.next(); assertEquals( "IFA", o[1] ); assertEquals( o[0], idIfa ); o = ( Object[] ) iter.next(); assertEquals( "JBoss", o[1] ); assertEquals( o[0], idJBoss ); session.remove( ifa ); session.remove( jboss ); } ); } @Test public void testMappedAliasStrategy(SessionFactoryScope scope) { Organization ifa = new Organization("IFA"); Organization jboss = new Organization("JBoss"); Person gavin = new Person("Gavin"); Employment emp = new 
Employment(gavin, jboss, "AU"); scope.inTransaction( session -> { session.persist(jboss); session.persist(ifa); session.persist(gavin); session.persist(emp); } ); scope.inTransaction( session -> { Query namedQuery = session.getNamedQuery("AllEmploymentAsMapped"); List list = namedQuery.list(); assertEquals(1,list.size()); Employment emp2 = (Employment) list.get(0); assertEquals(emp2.getEmploymentId(), emp.getEmploymentId() ); assertEquals(emp2.getStartDate().getDate(), emp.getStartDate().getDate() ); assertEquals(emp2.getEndDate(), emp.getEndDate() ); } ); scope.inTransaction( session -> { Query sqlQuery = session.getNamedQuery("EmploymentAndPerson"); sqlQuery.setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP); List list = sqlQuery.list(); assertEquals(1,list.size() ); Object res = list.get(0); assertClassAssignability( Map.class, res.getClass() ); Map m = (Map) res; assertEquals(2,m.size()); } ); scope.inTransaction( session -> { Query sqlQuery = session.getNamedQuery( "organizationreturnproperty" ); sqlQuery.setResultTransformer( Transformers.ALIAS_TO_ENTITY_MAP ); List list = sqlQuery.list(); assertEquals( 2,list.size() ); Map m = (Map) list.get(0); assertEquals( 1, m.size() ); assertTrue( m.containsKey("org") ); assertClassAssignability( m.get("org").getClass(), Organization.class ); if ( jboss.getId() == ( (Organization) m.get("org") ).getId() ) { assertTrue( Hibernate.isInitialized( ( (Organization) m.get("org") ).getEmployments() ) ); } Map m2 = (Map) list.get(1); assertEquals( 1, m.size() ); assertTrue( m2.containsKey("org") ); assertClassAssignability( m2.get("org").getClass(), Organization.class ); if ( ifa.getId() == ( (Organization) m2.get("org") ).getId() ) { assertTrue( Hibernate.isInitialized( ( (Organization) m2.get("org") ).getEmployments() ) ); } } ); scope.inTransaction( session -> { Query namedQuery = session.getNamedQuery("EmploymentAndPerson"); List list = namedQuery.list(); assertEquals(1,list.size() ); Object[] objs = (Object[]) 
list.get(0); assertEquals(2, objs.length); Employment emp2 = (Employment) objs[0]; Person _gavin = (Person) objs[1]; session.remove(emp2); session.remove(jboss); session.remove(_gavin); session.remove(ifa); } ); } @Test @FailureExpected( jiraKey = "unknown" ) public void testCompositeIdJoins(SessionFactoryScope scope) { scope.inTransaction( session -> { Person person = new Person(); person.setName( "Noob" ); Product product = new Product(); product.setProductId( new Product.ProductId() ); product.getProductId().setOrgid( "x" ); product.getProductId().setProductnumber( "1234" ); product.setName( "Hibernate 3" ); Order order = new Order(); order.setOrderId( new Order.OrderId() ); order.getOrderId().setOrdernumber( "1" ); order.getOrderId().setOrgid( "y" ); product.getOrders().add( order ); order.setProduct( product ); order.setPerson( person ); session.persist( product ); session.persist( order); session.persist( person ); } ); scope.inTransaction( session -> { Product p = (Product) session.createQuery( "from Product p join fetch p.orders" ).list().get(0); assertTrue(Hibernate.isInitialized( p.getOrders())); } ); scope.inTransaction( session -> { Object[] o = (Object[]) session.createNativeQuery( "select\r\n" + " product.orgid as {product.id.orgid}," + " product.productnumber as {product.id.productnumber}," + " {prod_orders}.orgid as orgid3_1_,\r\n" + " {prod_orders}.ordernumber as ordernum2_3_1_,\r\n" + " product.name as {product.name}," + " {prod_orders.element.*}" + /*" orders.PROD_NO as PROD4_3_1_,\r\n" + " orders.person as person3_1_,\r\n" + " orders.PROD_ORGID as PROD3_0__,\r\n" + " orders.PROD_NO as PROD4_0__,\r\n" + " orders.orgid as orgid0__,\r\n" + " orders.ordernumber as ordernum2_0__ \r\n" +*/ "from\r\n" + " Product product \r\n" + " inner join\r\n" + " TBL_ORDER {prod_orders} \r\n" + " on product.orgid={prod_orders}.PROD_ORGID \r\n" + " and product.productnumber={prod_orders}.PROD_NO" ) .addEntity( "product", Product.class ) .addJoin( "prod_orders", 
"product.orders" ) .list().get(0); Product p = (Product) o[0]; assertTrue(Hibernate.isInitialized( p.getOrders() )); assertNotNull(p.getOrders().iterator().next()); } ); } @Test public void testAutoDetectAliasing(SessionFactoryScope scope) { Organization ifa = new Organization("IFA"); Organization jboss = new Organization("JBoss"); Person gavin = new Person("Gavin"); Employment emp = new Employment(gavin, jboss, "AU"); scope.inTransaction( session -> { session.persist(jboss); session.persist(ifa); session.persist(gavin); session.persist(emp); } ); Employment emp2 = scope.fromTransaction( session -> { List list = session.createNativeQuery( getEmploymentSQL() ) .addEntity( Employment.class.getName() ) .list(); assertEquals( 1, list.size() ); Employment _emp2 = (Employment) list.get( 0 ); assertEquals( _emp2.getEmploymentId(), emp.getEmploymentId() ); assertEquals( _emp2.getStartDate().getDate(), emp.getStartDate().getDate() ); assertEquals( _emp2.getEndDate(), emp.getEndDate() ); return _emp2; } ); scope.inTransaction( session -> { List list = session.createNativeQuery( getEmploymentSQL() ) .addEntity( Employment.class.getName() ) .setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP) .list(); assertEquals( 1,list.size() ); Map m = (Map) list.get(0); assertTrue(m.containsKey("Employment")); assertEquals(1,m.size()); list = session.createNativeQuery(getEmploymentSQL()).list(); assertEquals(1, list.size()); Object[] o = (Object[]) list.get(0); assertEquals(8, o.length); list = session.createNativeQuery( getEmploymentSQL() ).setResultTransformer( new UpperCasedAliasToEntityMapResultTransformer() ).list(); assertEquals(1, list.size()); m = (Map) list.get(0); assertTrue(m.containsKey("EMPID")); assertTrue(m.containsKey("AMOUNT")); assertTrue(m.containsKey("END_DATE")); assertEquals(8, m.size()); list = session.createNativeQuery( getEmploymentSQLMixedScalarEntity() ).addScalar( "employerid" ).addEntity( Employment.class ).list(); assertEquals(1, list.size()); o = 
(Object[]) list.get(0); assertEquals(2, o.length); assertClassAssignability( Number.class, o[0].getClass() ); assertClassAssignability( Employment.class, o[1].getClass() ); Query queryWithCollection = session.getNamedQuery("organizationEmploymentsExplicitAliases"); queryWithCollection.setParameter("id", jboss.getId() ); list = queryWithCollection.list(); assertEquals( 1, list.size() ); session.clear(); list = session.createNativeQuery( getOrganizationJoinEmploymentSQL() ) .addEntity( "org", Organization.class ) .addJoin( "emp", "org.employments" ) .list(); assertEquals( 2,list.size() ); session.clear(); list = session.createNativeQuery( getOrganizationFetchJoinEmploymentSQL() ) .addEntity( "org", Organization.class ) .addJoin( "emp", "org.employments" ) .list(); assertEquals( 2,list.size() ); session.clear(); // TODO : why twice? session.getNamedQuery( "organizationreturnproperty" ).list(); list = session.getNamedQuery( "organizationreturnproperty" ).list(); assertEquals( 2,list.size() ); session.clear(); list = session.getNamedQuery( "organizationautodetect" ).list(); assertEquals( 2,list.size() ); } ); scope.inTransaction( session -> { session.remove(emp2); session.remove(jboss); session.remove(gavin); session.remove(ifa); } ); scope.inTransaction( session -> { Dimension dim = new Dimension( 3, 30 ); session.persist( dim ); List list = session.createNativeQuery( "select d_len * d_width as surface, d_len * d_width * 10 as volume from Dimension" ).list(); session.remove( dim ); } ); scope.inTransaction( session -> { SpaceShip enterprise = new SpaceShip(); enterprise.setModel( "USS" ); enterprise.setName( "Entreprise" ); enterprise.setSpeed( 50d ); Dimension d = new Dimension(45, 10); enterprise.setDimensions( d ); session.persist( enterprise ); session.flush(); Object[] result = (Object[]) session.getNamedQuery( "spaceship" ).uniqueResult(); assertEquals( 3, result.length, "expecting 3 result values" ); enterprise = ( SpaceShip ) result[0]; assertEquals( 50d, 
enterprise.getSpeed() ); assertEquals( 450d, extractDoubleValue( result[1] ) ); assertEquals( 4500d, extractDoubleValue( result[2] ) ); session.remove( enterprise ); } ); } @Test public void testExplicitReturnAPI(SessionFactoryScope scope) { Organization jboss = new Organization( "JBoss" ); Person me = new Person( "Steve" ); Employment emp = new Employment( me, jboss, "US" ); scope.inTransaction( session -> { session.persist( jboss ); session.persist( me ); session.persist( emp ); } ); scope.inTransaction( session -> { String sql = "SELECT org.orgid as orgid," + " org.name as name," + " emp.empid as empid," + " emp.employee as employee," + " emp.employer as employer," + " emp.start_date as start_date," + " emp.end_date as end_date," + " emp.region_code as region_code," + " emp.amount as amount," + " emp.currency as currency" + " FROM ORGANIZATION org" + " LEFT OUTER JOIN EMPLOYMENT emp ON org.orgid = emp.employer"; // as a control, lets apply an existing rs mapping NativeQuery sqlQuery = session.createNativeQuery( sql, "org-description" ); sqlQuery.list(); // next try a partial mapping def sqlQuery.addRoot( "org", Organization.class ); sqlQuery.addFetch( "emp", "org", "employments" ); sqlQuery.list(); // now try full explicit mappings sqlQuery.addRoot( "org", Organization.class ) .addProperty( "id", "orgid" ) .addProperty( "name" ).addColumnAlias( "name" ); sqlQuery.addFetch( "emp", "org", "employments" ) .addProperty( "key", "employer" ) .addProperty( "element", "empid" ) .addProperty( "element.employee", "employee" ) .addProperty( "element.employer", "employer" ) .addProperty( "element.startDate", "startDate" ) .addProperty( "element.endDate", "endDate" ) .addProperty( "element.regionCode", "regionCode" ) .addProperty( "element.employmentId", "empId" ) .addProperty( "element.salary" ).addColumnAlias( "AMOUNT" ).addColumnAlias( "CURRENCY" ); sqlQuery.list(); // lets try a totally different approach now and pull back scalars, first with explicit types 
sqlQuery.addScalar( "orgid", StandardBasicTypes.LONG ) .addScalar( "name", StandardBasicTypes.STRING ) .addScalar( "empid", StandardBasicTypes.LONG ) .addScalar( "employee", StandardBasicTypes.LONG ) .addScalar( "startDate", StandardBasicTypes.TIMESTAMP ) .addScalar( "endDate", StandardBasicTypes.TIMESTAMP ) .addScalar( "regionCode", StandardBasicTypes.STRING ) .addScalar( "empId", StandardBasicTypes.LONG ) .addScalar( "AMOUNT", StandardBasicTypes.FLOAT ) .addScalar( "CURRENCY", StandardBasicTypes.STRING ); } ); scope.inTransaction( session -> { session.remove( emp ); session.remove( jboss ); session.remove( me ); } ); } @Test public void testMixAndMatchEntityScalar(SessionFactoryScope scope) { scope.inSession( session -> { Transaction t = session.beginTransaction(); Speech speech = new Speech(); speech.setLength( 23d ); speech.setName( "Mine" ); session.persist( speech ); session.flush(); session.clear(); List l = session.createNativeQuery( "select name, id, flength, name as scalar_name from Speech", "speech" ).list(); assertEquals( 1, l.size() ); t.rollback(); } ); } private double extractDoubleValue(Object value) { if ( value instanceof BigInteger ) { return ( ( BigInteger ) value ).doubleValue(); } else if ( value instanceof BigDecimal ) { return ( ( BigDecimal ) value ).doubleValue(); } else { return Double.parseDouble( value.toString() ); } } @Test public void testAddJoinForManyToMany(SessionFactoryScope scope) { Person gavin = new Person( "Gavin" ); Person max = new Person( "Max" ); Person pete = new Person( "Pete" ); Group hibernate = new Group( "Hibernate" ); Group seam = new Group( "Seam" ); scope.inTransaction( session -> { session.persist( gavin ); session.persist( max ); session.persist( pete ); session.persist( seam ); session.persist( hibernate ); hibernate.getPersons().add( gavin ); hibernate.getPersons().add( max ); seam.getPersons().add( gavin ); seam.getPersons().add( pete ); session.flush(); session.clear(); // todo : see HHH-3908 // String 
sqlStr = "SELECT {groupp.*} , {gp.*} " + // "FROM GROUPP groupp, GROUP_PERSON gp, PERSON person WHERE groupp.ID = gp.GROUP_ID and person.PERID = gp.PERSON_ID"; // // List l = session.createSQLQuery( sqlStr ) // .addEntity("groupp", Group.class) // .addJoin("gp","groupp.persons") // .list(); List l = session.getNamedQuery( "manyToManyFetch" ).list(); //assertEquals( 2, l.size() ); } ); scope.inTransaction( session -> { seam.getPersons().remove( gavin ); seam.getPersons().remove( pete ); hibernate.getPersons().remove( gavin ); hibernate.getPersons().remove( max ); session.remove( seam ); session.remove( hibernate ); session.remove( gavin ); session.remove( max ); session.remove( pete ); } ); } @Test @JiraKey( "HHH-15102" ) @SkipForDialect(dialectClass = MySQLDialect.class, matchSubTypes = true) public void testCommentInSQLQuery(SessionFactoryScope scope) { scope.inTransaction( s -> s.createNativeQuery( "select sum(1) --count(*), effectively\nfrom ORGANIZATION" ).getSingleResult() ); } @Test public void testTextTypeInSQLQuery(SessionFactoryScope scope) { String description = buildLongString( 15000, 'a' ); TextHolder holder = new TextHolder( description ); scope.inTransaction( session -> session.persist( holder ) ); scope.inTransaction( session -> { Object result = session.createNativeQuery( getDescriptionsSQL() ).uniqueResult(); String descriptionRead; if ( result instanceof String ) { descriptionRead = (String) result; } else { Clob clob = (Clob) result; try { descriptionRead = clob.getSubString( 1L, (int) clob.length() ); } catch (SQLException e) { throw new RuntimeException( e ); } } assertEquals( description, descriptionRead ); session.remove( holder ); } ); } @Test public void testImageTypeInSQLQuery(SessionFactoryScope scope) { // Make sure the last byte is non-zero as Sybase cuts that off byte[] photo = buildLongByteArray( 14999, true ); ImageHolder holder = new ImageHolder( photo ); scope.inTransaction( session -> session.persist( holder ) ); 
scope.inTransaction( session -> { Object result = session.createNativeQuery( getPhotosSQL() ).uniqueResult(); byte[] photoRead; if ( result instanceof byte[] ) { photoRead = (byte[]) result; } else { Blob blob = (Blob) result; try { photoRead = blob.getBytes( 1L, (int) blob.length() ); } catch (SQLException e) { throw new RuntimeException( e ); } } assertArrayEquals( photo, photoRead ); session.remove( holder ); } ); } @Test @RequiresDialect(value = MySQLDialect.class, majorVersion = 5) public void testEscapeColonInSQL(SessionFactoryScope scope) throws QueryException { scope.inTransaction( session -> { NativeQuery query = session.createNativeQuery( "SELECT @row \\:= 1" ); List list = query.list(); assertEquals( "1", list.get( 0 ).toString() ); } ); } @Test @JiraKey( value = "HHH-14487") public void testAliasToBeanMap(SessionFactoryScope scope) { Person gavin = new Person( "Gavin" ); scope.inTransaction( session -> session.persist( gavin ) ); scope.inTransaction( session -> { HashMap result = (HashMap) session.createNativeQuery( "select * from PERSON" ) .setResultTransformer( Transformers.aliasToBean( HashMap.class ) ) .uniqueResult(); assertEquals( "Gavin", result.get( "NAME" ) == null ? result.get( "name" ) : result.get( "NAME" ) ); session.remove( gavin ); } ); } private String buildLongString(int size, char baseChar) { StringBuilder buff = new StringBuilder(); for( int i = 0; i < size; i++ ) { buff.append( baseChar ); } return buff.toString(); } private byte[] buildLongByteArray(int size, boolean on) { byte[] data = new byte[size]; data[0] = mask( on ); for ( int i = 0; i < size; i++ ) { data[i] = mask( on ); on = !on; } return data; } private byte mask(boolean on) { return on ? 
( byte ) 1 : ( byte ) 0; } private static class UpperCasedAliasToEntityMapResultTransformer implements ResultTransformer<Object> { public Object transformTuple(Object[] tuple, String[] aliases) { Map<String,Object> result = new HashMap<>( tuple.length ); for ( int i = 0; i < tuple.length; i++ ) { String alias = aliases[i]; if ( alias != null ) { result.put( alias.toUpperCase(Locale.ROOT), tuple[i] ); } } return result; } } }
apache/manifoldcf
35,166
connectors/forcedmetadata/connector/src/main/java/org/apache/manifoldcf/agents/transformation/forcedmetadata/ForcedMetadataConnector.java
/* $Id$ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.agents.transformation.forcedmetadata; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import java.io.*; import java.util.*; /** This connector works as a transformation connector, and merely adds specified metadata items. * */ public class ForcedMetadataConnector extends org.apache.manifoldcf.agents.transformation.BaseTransformationConnector { public static final String _rcsid = "@(#)$Id$"; // Nodes and attributes representing parameters and values. // There will be node for every parameter/value pair. 
public static final String NODE_EXPRESSION = "expression"; public static final String NODE_PAIR = "pair"; public static final String ATTRIBUTE_PARAMETER = "parameter"; public static final String NODE_FIELDMAP = "fieldmap"; public static final String NODE_KEEPMETADATA = "keepAllMetadata"; public static final String NODE_FILTEREMPTY = "filterEmpty"; public static final String ATTRIBUTE_SOURCE = "source"; public static final String ATTRIBUTE_TARGET = "target"; public static final String ATTRIBUTE_VALUE = "value"; // Templates private static final String VIEW_SPEC = "viewSpecification.html"; private static final String EDIT_SPEC_HEADER = "editSpecification.js"; private static final String EDIT_SPEC_EXPRESSIONS = "editSpecification_Expressions.html"; /** Get a pipeline version string, given a pipeline specification object. The version string is used to * uniquely describe the pertinent details of the specification and the configuration, to allow the Connector * Framework to determine whether a document will need to be processed again. * Note that the contents of any document cannot be considered by this method; only configuration and specification information * can be considered. * * This method presumes that the underlying connector object has been configured. *@param spec is the current pipeline specification object for this connection for the job that is doing the crawling. *@return a string, of unlimited length, which uniquely describes configuration and specification in such a way that * if two such strings are equal, nothing that affects how or whether the document is indexed will be different. */ @Override public VersionContext getPipelineDescription(Specification spec) throws ManifoldCFException, ServiceInterruption { SpecPacker sp = new SpecPacker(spec); return new VersionContext(sp.toPackedString(),params,spec); } /** Add (or replace) a document in the output data store using the connector. 
* This method presumes that the connector object has been configured, and it is thus able to communicate with the output data store should that be * necessary. * The OutputSpecification is *not* provided to this method, because the goal is consistency, and if output is done it must be consistent with the * output description, since that was what was partly used to determine if output should be taking place. So it may be necessary for this method to decode * an output description string in order to determine what should be done. *@param documentURI is the URI of the document. The URI is presumed to be the unique identifier which the output data store will use to process * and serve the document. This URI is constructed by the repository connector which fetches the document, and is thus universal across all output connectors. *@param pipelineDescription is the description string that was constructed for this document by the getOutputDescription() method. *@param document is the document data to be processed (handed to the output data store). *@param authorityNameString is the name of the authority responsible for authorizing any access tokens passed in with the repository document. May be null. *@param activities is the handle to an object that the implementer of a pipeline connector may use to perform operations, such as logging processing activity, * or sending a modified document to the next stage in the pipeline. *@return the document status (accepted or permanently rejected). *@throws IOException only if there's a stream error reading the document data. 
*/ @Override public int addOrReplaceDocumentWithException(String documentURI, VersionContext pipelineDescription, RepositoryDocument document, String authorityNameString, IOutputAddActivity activities) throws ManifoldCFException, ServiceInterruption, IOException { // Unpack the forced metadata SpecPacker sp = new SpecPacker(pipelineDescription.getSpecification()); // Create a structure that will allow us access to fields without sharing Reader objects FieldDataFactory fdf = new FieldDataFactory(document); try { // We have to create a copy of the Repository Document, since we might be rearranging things RepositoryDocument docCopy = document.duplicate(); // We must explicitly copy all fields, since we can't share references to Reader objects and // expect anything to work right docCopy.clearFields(); // Clear fields, unless we're supposed to keep what we don't specify if (sp.filterEmpty()) { if (sp.keepAllMetadata()) { // Loop through fields and copy them, filtering empties Iterator<String> fields = document.getFields(); while (fields.hasNext()) { String field = fields.next(); moveData(docCopy,field,fdf,field,true); } } } else if (sp.keepAllMetadata()) { // Copy ALL current fields from old document, but go through FieldDataFactory Iterator<String> fields = document.getFields(); while (fields.hasNext()) { String field = fields.next(); moveData(docCopy,field,fdf,field,false); } } // Iterate through the expressions Iterator<String> expressionKeys = sp.getExpressionKeys(); while (expressionKeys.hasNext()) { String expressionKey = expressionKeys.next(); // Get the set of expressions for the key Set<String> values = sp.getExpressionValues(expressionKey); IDataSource[] dataSources = new IDataSource[values.size()]; int k = 0; for (String expression : values) { dataSources[k++] = processExpression(expression, fdf); } int totalSize = 0; for (IDataSource dataSource : dataSources) { if (dataSource != null) totalSize += dataSource.getSize(); } if (totalSize == 0) { 
docCopy.removeField(expressionKey); } else { // Each IDataSource will contribute zero or more results to the final array. But here's the tricky part: // the results all must be of the same type. If there are any differences, then we have to bash them all to // strings first. Object[] allValues; k = 0; if (allDates(dataSources)) { allValues = new Date[totalSize]; for (IDataSource dataSource : dataSources) { if (dataSource != null) { for (Object o : dataSource.getRawForm()) { allValues[k++] = o; } } } docCopy.addField(expressionKey,(Date[])conditionallyRemoveNulls(allValues,sp.filterEmpty())); } else if (allReaders(dataSources)) { if (sp.filterEmpty()) allValues = new String[totalSize]; else allValues = new Reader[totalSize]; for (IDataSource dataSource : dataSources) { if (dataSource != null) { Object[] sources = sp.filterEmpty()?dataSource.getStringForm():dataSource.getRawForm(); for (Object o : sources) { allValues[k++] = o; } } } if (sp.filterEmpty()) docCopy.addField(expressionKey,removeEmpties((String[])allValues)); else docCopy.addField(expressionKey,(Reader[])allValues); } else { allValues = new String[totalSize]; // Convert to strings throughout for (IDataSource dataSource : dataSources) { if (dataSource != null) { for (Object o : dataSource.getStringForm()) { allValues[k++] = o; } } } if (sp.filterEmpty()) docCopy.addField(expressionKey,removeEmpties((String[])allValues)); else docCopy.addField(expressionKey,(String[])allValues); } } } // Finally, send the modified repository document onward to the next pipeline stage. // If we'd done anything to the stream, we'd have needed to create a new RepositoryDocument object and copied the // data into it, and closed the new stream after sendDocument() was called. 
return activities.sendDocument(documentURI,docCopy); } finally { fdf.close(); } } protected static boolean allDates(IDataSource[] dataSources) throws IOException, ManifoldCFException { for (IDataSource ds : dataSources) { if (ds != null && !(ds.getRawForm() instanceof Date[])) return false; } return true; } protected static boolean allReaders(IDataSource[] dataSources) throws IOException, ManifoldCFException { for (IDataSource ds : dataSources) { if (ds != null && !(ds.getRawForm() instanceof Reader[])) return false; } return true; } protected static void moveData(RepositoryDocument docCopy, String target, FieldDataFactory document, String field, boolean filterEmpty) throws ManifoldCFException, IOException { Object[] fieldData = document.getField(field); if (fieldData instanceof Date[]) docCopy.addField(target,(Date[])conditionallyRemoveNulls(fieldData,filterEmpty)); else if (fieldData instanceof Reader[]) { // To strip out empty fields, we will need to convert readers to strings if (filterEmpty) docCopy.addField(target,removeEmpties(document.getFieldAsStrings(field))); else docCopy.addField(target,(Reader[])fieldData); } else if (fieldData instanceof String[]) { String[] processedFieldData; if (filterEmpty) processedFieldData = removeEmpties((String[])fieldData); else processedFieldData = (String[])fieldData; docCopy.addField(target,processedFieldData); } } protected static String[] removeEmpties(String[] input) { int count = 0; for (String s : input) { if (s != null && s.length() > 0) count++; } if (count == input.length) return input; String[] rval = new String[count]; count = 0; for (String s : input) { if (s != null && s.length() > 0) rval[count++] = s; } return rval; } protected static Object[] conditionallyRemoveNulls(Object[] input, boolean filterEmpty) { if (!filterEmpty) return input; int count = 0; for (Object o : input) { if (o != null) count++; } if (count == input.length) return input; Object[] rval = new Object[count]; count = 0; for (Object o : 
input) { if (o != null) rval[count++] = o; } return rval; } // UI support methods. // // These support methods come in two varieties. The first bunch (inherited from IConnector) is involved in setting up connection configuration information. // The second bunch // is involved in presenting and editing pipeline specification information for a connection within a job. The two kinds of methods are accordingly treated differently, // in that the first bunch cannot assume that the current connector object is connected, while the second bunch can. That is why the first bunch // receives a thread context argument for all UI methods, while the second bunch does not need one (since it has already been applied via the connect() // method, above). /** Obtain the name of the form check javascript method to call. *@param connectionSequenceNumber is the unique number of this connection within the job. *@return the name of the form check javascript method. */ @Override public String getFormCheckJavascriptMethodName(int connectionSequenceNumber) { return "s"+connectionSequenceNumber+"_checkSpecification"; } /** Obtain the name of the form presave check javascript method to call. *@param connectionSequenceNumber is the unique number of this connection within the job. *@return the name of the form presave check javascript method. */ @Override public String getFormPresaveCheckJavascriptMethodName(int connectionSequenceNumber) { return "s"+connectionSequenceNumber+"_checkSpecificationForSave"; } /** Output the specification header section. * This method is called in the head section of a job page which has selected a pipeline connection of the current type. Its purpose is to add the required tabs * to the list, and to output any javascript methods that might be needed by the job editing HTML. *@param out is the output to which any HTML should be sent. *@param locale is the preferred local of the output. *@param os is the current pipeline specification for this connection. 
*@param connectionSequenceNumber is the unique number of this connection within the job. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. */ @Override public void outputSpecificationHeader(IHTTPOutput out, Locale locale, Specification os, int connectionSequenceNumber, List<String> tabsArray) throws ManifoldCFException, IOException { // Output specification header tabsArray.add(Messages.getString(locale, "ForcedMetadata.Expressions")); Map<String, Object> paramMap = new HashMap<String, Object>(); paramMap.put("SEQNUM",Integer.toString(connectionSequenceNumber)); Messages.outputResourceWithVelocity(out,locale,EDIT_SPEC_HEADER,paramMap); } /** Output the specification body section. * This method is called in the body section of a job page which has selected a pipeline connection of the current type. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the * form is "editjob". *@param out is the output to which any HTML should be sent. *@param locale is the preferred local of the output. *@param os is the current pipeline specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@param actualSequenceNumber is the connection within the job that has currently been selected. *@param tabName is the current tab name. 
*/ @Override public void outputSpecificationBody(IHTTPOutput out, Locale locale, Specification os, int connectionSequenceNumber, int actualSequenceNumber, String tabName) throws ManifoldCFException, IOException { // Output specification body Map<String, Object> paramMap = new HashMap<String, Object>(); paramMap.put("TABNAME", tabName); paramMap.put("SEQNUM",Integer.toString(connectionSequenceNumber)); paramMap.put("SELECTEDNUM",Integer.toString(actualSequenceNumber)); fillInExpressionsTab(paramMap, os); Messages.outputResourceWithVelocity(out,locale,EDIT_SPEC_EXPRESSIONS,paramMap); } /** Process a specification post. * This method is called at the start of job's edit or view page, whenever there is a possibility that form data for a connection has been * posted. Its purpose is to gather form information and modify the transformation specification accordingly. * The name of the posted form is "editjob". *@param variableContext contains the post data, including binary file-upload information. *@param locale is the preferred local of the output. *@param os is the current pipeline specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@return null if all is well, or a string error message if there is an error that should prevent saving of the job (and cause a redirection to an error page). 
*/ @Override public String processSpecificationPost(IPostParameters variableContext, Locale locale, Specification os, int connectionSequenceNumber) throws ManifoldCFException { // Process specification post String seqPrefix = "s"+connectionSequenceNumber+"_"; String expressionCount = variableContext.getParameter(seqPrefix+"expression_count"); if (expressionCount != null) { int count = Integer.parseInt(expressionCount); // Delete old spec data, including legacy node types we no longer use int i = 0; while (i < os.getChildCount()) { SpecificationNode cn = os.getChild(i); if (cn.getType().equals(NODE_EXPRESSION) || cn.getType().equals(NODE_PAIR) || cn.getType().equals(NODE_FIELDMAP)) os.removeChild(i); else i++; } // Now, go through form data for (int j = 0; j < count; j++) { String op = variableContext.getParameter(seqPrefix+"expression_"+j+"_op"); if (op != null && op.equals("Delete")) continue; String paramName = variableContext.getParameter(seqPrefix+"expression_"+j+"_name"); String paramRemove = variableContext.getParameter(seqPrefix+"expression_"+j+"_remove"); String paramValue = variableContext.getParameter(seqPrefix+"expression_"+j+"_value"); SpecificationNode sn = new SpecificationNode(NODE_EXPRESSION); sn.setAttribute(ATTRIBUTE_PARAMETER,paramName); if (!(paramRemove != null && paramRemove.equals("true"))) sn.setAttribute(ATTRIBUTE_VALUE,paramValue); os.addChild(os.getChildCount(),sn); } // Look for add operation String addOp = variableContext.getParameter(seqPrefix+"expression_op"); if (addOp != null && addOp.equals("Add")) { String paramName = variableContext.getParameter(seqPrefix+"expression_name"); String paramRemove = variableContext.getParameter(seqPrefix+"expression_remove"); String paramValue = variableContext.getParameter(seqPrefix+"expression_value"); SpecificationNode sn = new SpecificationNode(NODE_EXPRESSION); sn.setAttribute(ATTRIBUTE_PARAMETER,paramName); if (!(paramRemove != null && paramRemove.equals("true"))) 
sn.setAttribute(ATTRIBUTE_VALUE,paramValue); os.addChild(os.getChildCount(),sn); } } String x = variableContext.getParameter(seqPrefix+"keepallmetadata_present"); if (x != null && x.length() > 0) { String keepAll = variableContext.getParameter(seqPrefix+"keepallmetadata"); if (keepAll == null) keepAll = "false"; // About to gather the fieldmapping nodes, so get rid of the old ones. int i = 0; while (i < os.getChildCount()) { SpecificationNode node = os.getChild(i); if (node.getType().equals(NODE_KEEPMETADATA)) os.removeChild(i); else i++; } // Gather the keep all metadata parameter to be the last one SpecificationNode node = new SpecificationNode(NODE_KEEPMETADATA); node.setAttribute(ATTRIBUTE_VALUE, keepAll); // Add the new keepallmetadata config parameter os.addChild(os.getChildCount(), node); } x = variableContext.getParameter(seqPrefix+"filterempty_present"); if (x != null && x.length() > 0) { String filterEmpty = variableContext.getParameter(seqPrefix+"filterempty"); if (filterEmpty == null) filterEmpty = "false"; // About to gather the fieldmapping nodes, so get rid of the old ones. int i = 0; while (i < os.getChildCount()) { SpecificationNode node = os.getChild(i); if (node.getType().equals(NODE_FILTEREMPTY)) os.removeChild(i); else i++; } // Gather the keep all metadata parameter to be the last one SpecificationNode node = new SpecificationNode(NODE_FILTEREMPTY); node.setAttribute(ATTRIBUTE_VALUE, filterEmpty); // Add the new keepallmetadata config parameter os.addChild(os.getChildCount(), node); } return null; } /** View specification. * This method is called in the body section of a job's view page. Its purpose is to present the pipeline specification information to the user. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt; and &lt;body&gt;tags. *@param out is the output to which any HTML should be sent. *@param locale is the preferred local of the output. 
*@param connectionSequenceNumber is the unique number of this connection within the job. *@param os is the current pipeline specification for this job. */ @Override public void viewSpecification(IHTTPOutput out, Locale locale, Specification os, int connectionSequenceNumber) throws ManifoldCFException, IOException { // View specification Map<String, Object> paramMap = new HashMap<String, Object>(); paramMap.put("SEQNUM",Integer.toString(connectionSequenceNumber)); // Fill in the map with data from all tabs fillInExpressionsTab(paramMap, os); Messages.outputResourceWithVelocity(out,locale,VIEW_SPEC,paramMap); } protected static void fillInExpressionsTab(Map<String,Object> paramMap, Specification os) { final Map<String,Set<String>> expressions = new HashMap<String,Set<String>>(); final Map<String,Set<String>> expressionAdditions = new HashMap<String,Set<String>>(); final Map<String,Set<String>> additions = new HashMap<String,Set<String>>(); String keepAllMetadataValue = "true"; String filterEmptyValue = "true"; for (int i = 0; i < os.getChildCount(); i++) { SpecificationNode sn = os.getChild(i); if (sn.getType().equals(NODE_FIELDMAP)) { String source = sn.getAttributeValue(ATTRIBUTE_SOURCE); String target = sn.getAttributeValue(ATTRIBUTE_TARGET); String targetDisplay; expressions.put(source,new HashSet<String>()); if (target != null) { Set<String> sources = new HashSet<String>(); sources.add("${"+source+"}"); expressions.put(target,sources); } } else if (sn.getType().equals(NODE_PAIR)) { String parameter = sn.getAttributeValue(ATTRIBUTE_PARAMETER); String value = sn.getAttributeValue(ATTRIBUTE_VALUE); // Since the same target is completely superceded by a NODE_PAIR, but NODE_PAIRs // are cumulative, I have to build these completely and then post-process them. 
Set<String> addition = additions.get(parameter); if (addition == null) { addition = new HashSet<String>(); additions.put(parameter,addition); } addition.add(nonExpressionEscape(value)); } else if (sn.getType().equals(NODE_EXPRESSION)) { String parameter = sn.getAttributeValue(ATTRIBUTE_PARAMETER); String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value == null) { expressionAdditions.put(parameter,new HashSet<String>()); } else { Set<String> expressionAddition = expressionAdditions.get(parameter); if (expressionAddition == null) { expressionAddition = new HashSet<String>(); expressionAdditions.put(parameter,expressionAddition); } expressionAddition.add(value); } } else if (sn.getType().equals(NODE_KEEPMETADATA)) { keepAllMetadataValue = sn.getAttributeValue(ATTRIBUTE_VALUE); } else if (sn.getType().equals(NODE_FILTEREMPTY)) { filterEmptyValue = sn.getAttributeValue(ATTRIBUTE_VALUE); } } // Postprocessing. // Override the moves with the additions for (String parameter : additions.keySet()) { expressions.put(parameter,additions.get(parameter)); } // Override all with expression additions for (String parameter : expressionAdditions.keySet()) { expressions.put(parameter,expressionAdditions.get(parameter)); } // Problem: how to display case where we want a null source?? 
// A: Special value List<Map<String,String>> pObject = new ArrayList<Map<String,String>>(); String[] keys = expressions.keySet().toArray(new String[0]); java.util.Arrays.sort(keys); // Now, build map for (String key : keys) { Set<String> values = expressions.get(key); if (values.size() == 0) { Map<String,String> record = new HashMap<String,String>(); record.put("parameter",key); record.put("value",""); record.put("isnull","true"); pObject.add(record); } else { String[] valueArray = values.toArray(new String[0]); java.util.Arrays.sort(valueArray); for (String value : valueArray) { Map<String,String> record = new HashMap<String,String>(); record.put("parameter",key); record.put("value",value); record.put("isnull","false"); pObject.add(record); } } } paramMap.put("EXPRESSIONS",pObject); paramMap.put("KEEPALLMETADATA",keepAllMetadataValue); paramMap.put("FILTEREMPTY",filterEmptyValue); } /** This is used to upgrade older constant values to new ones, that won't trigger expression eval. */ protected static String nonExpressionEscape(String input) { // Not doing any escaping yet return input; } /** This is used to unescape text that's been escaped to prevent substitution of ${} expressions. */ protected static String nonExpressionUnescape(String input) { // Not doing any escaping yet return input; } protected static IDataSource append(IDataSource currentValues, IDataSource data) throws IOException, ManifoldCFException { // currentValues and data can either be: // Date[], String[], or Reader[]. // We want to preserve the type in as high a form as possible when we compute the combinations. if (currentValues == null) return data; if (currentValues.getSize() == 0) return currentValues; // Any combination causes conversion to a string, so if we get here, we can read the inputs all // as strings safely. 
String[] currentStrings = currentValues.getStringForm(); String[] dataStrings = data.getStringForm(); String[] rval = new String[currentStrings.length * dataStrings.length]; int rvalIndex = 0; for (String currentString : currentStrings) { for (String dataString : dataStrings) { rval[rvalIndex++] = currentString + dataString; } } return new StringSource(rval); } public static IDataSource processExpression(String expression, FieldDataFactory sourceDocument) throws IOException, ManifoldCFException { int index = 0; IDataSource input = null; while (true) { // If we're at the end, return the input if (index == expression.length()) return input; // Look for next field specification int field = expression.indexOf("${",index); if (field == -1) return append(input, new StringSource(nonExpressionUnescape(expression.substring(index)))); if (field > 0) input = append(input, new StringSource(nonExpressionUnescape(expression.substring(index,field)))); // Parse the field name, and regular expression (if any) StringBuilder fieldNameBuffer = new StringBuilder(); StringBuilder regExpBuffer = new StringBuilder(); StringBuilder groupNumberBuffer = new StringBuilder(); field = parseArgument(expression, field+2, fieldNameBuffer); field = parseArgument(expression, field, regExpBuffer); field = parseArgument(expression, field, groupNumberBuffer); int fieldEnd = parseToEnd(expression, field); if (fieldEnd == expression.length()) { if (fieldNameBuffer.length() > 0) return append(input, new FieldSource(sourceDocument, fieldNameBuffer.toString(), regExpBuffer.toString(), groupNumberBuffer.toString())); return input; } else { if (fieldNameBuffer.length() > 0) input = append(input, new FieldSource(sourceDocument, fieldNameBuffer.toString(), regExpBuffer.toString(), groupNumberBuffer.toString())); index = fieldEnd; } } } protected static int parseArgument(final String input, int start, final StringBuilder output) { // Parse until we hit the end marker or an unescaped pipe symbol while (true) { if 
(input.length() == start) return start; char theChar = input.charAt(start); if (theChar == '}') return start; start++; if (theChar == '|') return start; if (theChar == '\\') { if (input.length() == start) return start; theChar = input.charAt(start++); } output.append(theChar); } } protected static int parseToEnd(final String input, int start) { while (true) { if (input.length() == start) return start; char theChar = input.charAt(start++); if (theChar == '}') return start; if (theChar == '\\') { if (input.length() == start) return start; start++; } } } protected static class SpecPacker { private final boolean keepAllMetadata; private final boolean filterEmpty; private final Map<String,Set<String>> expressions = new HashMap<String,Set<String>>(); public SpecPacker(Specification os) { boolean keepAllMetadata = true; boolean filterEmpty = true; final Map<String,Set<String>> additions = new HashMap<String,Set<String>>(); final Map<String,Set<String>> expressionAdditions = new HashMap<String,Set<String>>(); for (int i = 0; i < os.getChildCount(); i++) { SpecificationNode sn = os.getChild(i); if(sn.getType().equals(NODE_KEEPMETADATA)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); keepAllMetadata = Boolean.parseBoolean(value); } else if (sn.getType().equals(NODE_FILTEREMPTY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); filterEmpty = Boolean.parseBoolean(value); } else if (sn.getType().equals(NODE_FIELDMAP)) { String source = sn.getAttributeValue(ATTRIBUTE_SOURCE); String target = sn.getAttributeValue(ATTRIBUTE_TARGET); expressions.put(source,new HashSet<String>()); // Null target means to remove the *source* from the document. 
if (target != null) { Set<String> sources = new HashSet<String>(); sources.add("${"+source+"}"); expressions.put(target,sources); } } else if (sn.getType().equals(NODE_PAIR)) { String parameter = sn.getAttributeValue(ATTRIBUTE_PARAMETER); String value = sn.getAttributeValue(ATTRIBUTE_VALUE); // Since the same target is completely superceded by a NODE_PAIR, but NODE_PAIRs // are cumulative, I have to build these completely and then post-process them. Set<String> addition = additions.get(parameter); if (addition == null) { addition = new HashSet<String>(); additions.put(parameter,addition); } addition.add(nonExpressionEscape(value)); } else if (sn.getType().equals(NODE_EXPRESSION)) { String parameter = sn.getAttributeValue(ATTRIBUTE_PARAMETER); String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value == null) { expressionAdditions.put(parameter,new HashSet<String>()); } else { Set<String> expressionAddition = expressionAdditions.get(parameter); if (expressionAddition == null) { expressionAddition = new HashSet<String>(); expressionAdditions.put(parameter,expressionAddition); } expressionAddition.add(value); } } } // Override the moves with the additions for (String parameter : additions.keySet()) { expressions.put(parameter,additions.get(parameter)); } // Override all with expression additions for (String parameter : expressionAdditions.keySet()) { expressions.put(parameter,expressionAdditions.get(parameter)); } this.keepAllMetadata = keepAllMetadata; this.filterEmpty = filterEmpty; } public String toPackedString() { StringBuilder sb = new StringBuilder(); int i; final String[] sortArray = expressions.keySet().toArray(new String[0]); java.util.Arrays.sort(sortArray); // Pack the list of keys packList(sb,sortArray,'+'); for (String key : sortArray) { Set<String> values = expressions.get(key); String[] valueArray = values.toArray(new String[0]); java.util.Arrays.sort(valueArray); packList(sb,valueArray,'+'); } // Keep all metadata if (keepAllMetadata) 
sb.append('+'); else sb.append('-'); // Filter empty if (filterEmpty) sb.append('+'); else sb.append('-'); return sb.toString(); } public Iterator<String> getExpressionKeys() { return expressions.keySet().iterator(); } public Set<String> getExpressionValues(String key) { return expressions.get(key); } public boolean keepAllMetadata() { return keepAllMetadata; } public boolean filterEmpty() { return filterEmpty; } } }
googleapis/google-cloud-java
35,049
java-cloudcontrolspartner/proto-google-cloud-cloudcontrolspartner-v1beta/src/main/java/com/google/cloud/cloudcontrolspartner/v1beta/UpdateCustomerRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/cloudcontrolspartner/v1beta/customers.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.cloudcontrolspartner.v1beta; /** * * * <pre> * Request to update a customer * </pre> * * Protobuf type {@code google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest} */ public final class UpdateCustomerRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) UpdateCustomerRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateCustomerRequest.newBuilder() to construct. 
private UpdateCustomerRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateCustomerRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateCustomerRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudcontrolspartner.v1beta.CustomersProto .internal_static_google_cloud_cloudcontrolspartner_v1beta_UpdateCustomerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudcontrolspartner.v1beta.CustomersProto .internal_static_google_cloud_cloudcontrolspartner_v1beta_UpdateCustomerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest.class, com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest.Builder.class); } private int bitField0_; public static final int CUSTOMER_FIELD_NUMBER = 1; private com.google.cloud.cloudcontrolspartner.v1beta.Customer customer_; /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the customer field is set. */ @java.lang.Override public boolean hasCustomer() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The customer. 
*/ @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.Customer getCustomer() { return customer_ == null ? com.google.cloud.cloudcontrolspartner.v1beta.Customer.getDefaultInstance() : customer_; } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.CustomerOrBuilder getCustomerOrBuilder() { return customer_ == null ? com.google.cloud.cloudcontrolspartner.v1beta.Customer.getDefaultInstance() : customer_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCustomer()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCustomer()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest)) { return super.equals(obj); } com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest other = (com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) obj; if (hasCustomer() != other.hasCustomer()) return false; if (hasCustomer()) { if (!getCustomer().equals(other.getCustomer())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasCustomer()) { hash = (37 * hash) + CUSTOMER_FIELD_NUMBER; hash = (53 * hash) + getCustomer().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update a customer * </pre> * * Protobuf type {@code google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudcontrolspartner.v1beta.CustomersProto .internal_static_google_cloud_cloudcontrolspartner_v1beta_UpdateCustomerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudcontrolspartner.v1beta.CustomersProto .internal_static_google_cloud_cloudcontrolspartner_v1beta_UpdateCustomerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest.class, com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest.Builder.class); } // Construct using // com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCustomerFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; customer_ = null; if (customerBuilder_ != null) { 
customerBuilder_.dispose(); customerBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.cloudcontrolspartner.v1beta.CustomersProto .internal_static_google_cloud_cloudcontrolspartner_v1beta_UpdateCustomerRequest_descriptor; } @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest getDefaultInstanceForType() { return com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest build() { com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest buildPartial() { com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest result = new com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.customer_ = customerBuilder_ == null ? customer_ : customerBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) { return mergeFrom( (com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest other) { if (other == com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest .getDefaultInstance()) return this; if (other.hasCustomer()) { mergeCustomer(other.getCustomer()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCustomerFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.cloudcontrolspartner.v1beta.Customer customer_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudcontrolspartner.v1beta.Customer, com.google.cloud.cloudcontrolspartner.v1beta.Customer.Builder, com.google.cloud.cloudcontrolspartner.v1beta.CustomerOrBuilder> customerBuilder_; /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the customer field is set. */ public boolean hasCustomer() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The customer. */ public com.google.cloud.cloudcontrolspartner.v1beta.Customer getCustomer() { if (customerBuilder_ == null) { return customer_ == null ? 
com.google.cloud.cloudcontrolspartner.v1beta.Customer.getDefaultInstance() : customer_; } else { return customerBuilder_.getMessage(); } } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCustomer(com.google.cloud.cloudcontrolspartner.v1beta.Customer value) { if (customerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } customer_ = value; } else { customerBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCustomer( com.google.cloud.cloudcontrolspartner.v1beta.Customer.Builder builderForValue) { if (customerBuilder_ == null) { customer_ = builderForValue.build(); } else { customerBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCustomer(com.google.cloud.cloudcontrolspartner.v1beta.Customer value) { if (customerBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && customer_ != null && customer_ != com.google.cloud.cloudcontrolspartner.v1beta.Customer.getDefaultInstance()) { getCustomerBuilder().mergeFrom(value); } else { customer_ = value; } } else { customerBuilder_.mergeFrom(value); } if (customer_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCustomer() { bitField0_ = (bitField0_ & ~0x00000001); customer_ = null; if (customerBuilder_ != null) { customerBuilder_.dispose(); customerBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.cloudcontrolspartner.v1beta.Customer.Builder getCustomerBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCustomerFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.cloudcontrolspartner.v1beta.CustomerOrBuilder getCustomerOrBuilder() { if (customerBuilder_ != null) { return customerBuilder_.getMessageOrBuilder(); } else { return customer_ == null ? com.google.cloud.cloudcontrolspartner.v1beta.Customer.getDefaultInstance() : customer_; } } /** * * * <pre> * Required. The customer to update * Format: * `organizations/{organization}/locations/{location}/customers/{customer}` * </pre> * * <code> * .google.cloud.cloudcontrolspartner.v1beta.Customer customer = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudcontrolspartner.v1beta.Customer, com.google.cloud.cloudcontrolspartner.v1beta.Customer.Builder, com.google.cloud.cloudcontrolspartner.v1beta.CustomerOrBuilder> getCustomerFieldBuilder() { if (customerBuilder_ == null) { customerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudcontrolspartner.v1beta.Customer, com.google.cloud.cloudcontrolspartner.v1beta.Customer.Builder, com.google.cloud.cloudcontrolspartner.v1beta.CustomerOrBuilder>( getCustomer(), getParentForChildren(), isClean()); customer_ = null; } return customerBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. 
The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. 
The list of fields to update * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) } // @@protoc_insertion_point(class_scope:google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest) private static final com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest(); } public static com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateCustomerRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateCustomerRequest>() { @java.lang.Override public UpdateCustomerRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateCustomerRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateCustomerRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/drill
34,848
exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.physical.rowSet; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import java.util.List; import org.apache.drill.categories.RowSetTest; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet; import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet; import org.apache.drill.exec.record.VectorContainer; import org.apache.drill.exec.record.metadata.SchemaBuilder; import org.apache.drill.exec.record.metadata.TupleMetadata; import org.apache.drill.exec.vector.NullableBigIntVector; import org.apache.drill.exec.vector.NullableFloat8Vector; import org.apache.drill.exec.vector.NullableIntVector; import org.apache.drill.exec.vector.NullableVarCharVector; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.vector.accessor.ArrayReader; import org.apache.drill.exec.vector.accessor.ArrayWriter; import 
org.apache.drill.exec.vector.accessor.ObjectReader; import org.apache.drill.exec.vector.accessor.ObjectType; import org.apache.drill.exec.vector.accessor.ObjectWriter; import org.apache.drill.exec.vector.accessor.ScalarReader; import org.apache.drill.exec.vector.accessor.ScalarWriter; import org.apache.drill.exec.vector.accessor.TupleReader; import org.apache.drill.exec.vector.accessor.TupleWriter; import org.apache.drill.exec.vector.accessor.VariantReader; import org.apache.drill.exec.vector.accessor.VariantWriter; import org.apache.drill.exec.vector.complex.ListVector; import org.apache.drill.exec.vector.complex.MapVector; import org.apache.drill.exec.vector.complex.UnionVector; import org.apache.drill.test.SubOperatorTest; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Tests for readers and writers for union and list types. * <p> * Note that the union type is only partially supported in Drill. * The list type is unsupported. (However, the list type works in * the schema builder, row set writer, row set reader and the * result set builder. It does not, however, work in the Project * and other operators. Some assembly required for future use.) 
*/ @Category(RowSetTest.class) public class TestVariantAccessors extends SubOperatorTest { @Test public void testBuildRowSetUnion() { final TupleMetadata schema = new SchemaBuilder() // Union with simple and complex types .addUnion("u") .addType(MinorType.INT) .addMap() .addNullable("c", MinorType.BIGINT) .addNullable("d", MinorType.VARCHAR) .resumeUnion() .addList() .addType(MinorType.VARCHAR) .resumeUnion() .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final VectorContainer vc = rowSet.container(); assertEquals(1, vc.getNumberOfColumns()); // Single union final ValueVector vector = vc.getValueVector(0).getValueVector(); assertTrue(vector instanceof UnionVector); final UnionVector union = (UnionVector) vector; final MapVector typeMap = union.getTypeMap(); ValueVector member = typeMap.getChild(MinorType.INT.name()); assertTrue(member instanceof NullableIntVector); // Inner map member = typeMap.getChild(MinorType.MAP.name()); assertTrue(member instanceof MapVector); member = typeMap.getChild(MinorType.MAP.name()); assertTrue(member instanceof MapVector); final MapVector childMap = (MapVector) member; ValueVector mapMember = childMap.getChild("c"); assertNotNull(mapMember); assertTrue(mapMember instanceof NullableBigIntVector); mapMember = childMap.getChild("d"); assertNotNull(mapMember); assertTrue(mapMember instanceof NullableVarCharVector); // Inner list member = typeMap.getChild(MinorType.LIST.name()); assertTrue(member instanceof ListVector); final ListVector list = (ListVector) member; assertTrue(list.getDataVector() instanceof NullableVarCharVector); rowSet.clear(); } /** * Test a variant (AKA "union vector") at the top level, using * just scalar values. 
*/ @Test public void testScalarVariant() { final TupleMetadata schema = new SchemaBuilder() .addUnion("u") .addType(MinorType.INT) .addType(MinorType.VARCHAR) .addType(MinorType.FLOAT8) .resumeSchema() .buildSchema(); final ExtendableRowSet rs = fixture.rowSet(schema); final RowSetWriter writer = rs.writer(); // Sanity check of writer structure final ObjectWriter wo = writer.column(0); assertEquals(ObjectType.VARIANT, wo.type()); final VariantWriter vw = wo.variant(); assertSame(vw, writer.variant(0)); assertSame(vw, writer.variant("u")); assertTrue(vw.hasType(MinorType.INT)); assertTrue(vw.hasType(MinorType.VARCHAR)); assertTrue(vw.hasType(MinorType.FLOAT8)); // Write values of different types vw.scalar(MinorType.INT).setInt(10); writer.save(); vw.scalar(MinorType.VARCHAR).setString("fred"); writer.save(); // The entire variant is null vw.setNull(); writer.save(); vw.scalar(MinorType.FLOAT8).setDouble(123.45); writer.save(); // Strange case: just the value is null, but the variant // is not null. vw.scalar(MinorType.INT).setNull(); writer.save(); // Marker to avoid fill-empty issues (fill-empties tested elsewhere.) vw.scalar(MinorType.INT).setInt(20); writer.save(); final SingleRowSet result = writer.done(); assertEquals(6, result.rowCount()); // Read the values. 
final RowSetReader reader = result.reader(); // Sanity check of structure final ObjectReader ro = reader.column(0); assertEquals(ObjectType.VARIANT, ro.type()); final VariantReader vr = ro.variant(); assertSame(vr, reader.variant(0)); assertSame(vr, reader.variant("u")); for (final MinorType type : MinorType.values()) { if (type == MinorType.INT || type == MinorType.VARCHAR || type == MinorType.FLOAT8) { assertTrue(vr.hasType(type)); } else { assertFalse(vr.hasType(type)); } } // Can get readers up front final ScalarReader intReader = vr.scalar(MinorType.INT); final ScalarReader strReader = vr.scalar(MinorType.VARCHAR); final ScalarReader floatReader = vr.scalar(MinorType.FLOAT8); // Verify the data // Int 10 assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.INT); assertSame(intReader, vr.scalar()); assertNotNull(vr.member()); assertSame(vr.scalar(), vr.member().scalar()); assertFalse(intReader.isNull()); assertEquals(10, intReader.getInt()); assertTrue(strReader.isNull()); assertTrue(floatReader.isNull()); // String "fred" assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.VARCHAR); assertSame(strReader, vr.scalar()); assertFalse(strReader.isNull()); assertEquals("fred", strReader.getString()); assertTrue(intReader.isNull()); assertTrue(floatReader.isNull()); // Null value assertTrue(reader.next()); assertTrue(vr.isNull()); assertNull(vr.dataType()); assertNull(vr.scalar()); assertTrue(intReader.isNull()); assertTrue(strReader.isNull()); assertTrue(floatReader.isNull()); // Double 123.45 assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.FLOAT8); assertSame(floatReader, vr.scalar()); assertFalse(floatReader.isNull()); assertEquals(123.45, vr.scalar().getDouble(), 0.001); assertTrue(intReader.isNull()); assertTrue(strReader.isNull()); // Strange case: null int (but union is not null) assertTrue(reader.next()); assertFalse(vr.isNull()); 
assertSame(vr.dataType(), MinorType.INT); assertTrue(intReader.isNull()); // Int 20 assertTrue(reader.next()); assertFalse(vr.isNull()); assertFalse(intReader.isNull()); assertEquals(20, intReader.getInt()); assertFalse(reader.next()); result.clear(); } @Test public void testBuildRowSetScalarList() { final TupleMetadata schema = new SchemaBuilder() // Top-level single-element list .addList("list2") .addType(MinorType.VARCHAR) .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final VectorContainer vc = rowSet.container(); assertEquals(1, vc.getNumberOfColumns()); // Single-type list final ValueVector vector = vc.getValueVector(0).getValueVector(); assertTrue(vector instanceof ListVector); final ListVector list = (ListVector) vector; assertTrue(list.getDataVector() instanceof NullableVarCharVector); rowSet.clear(); } @Test public void testBuildRowSetUnionArray() { final TupleMetadata schema = new SchemaBuilder() // List with multiple types .addList("list1") .addType(MinorType.BIGINT) .addMap() .addNullable("a", MinorType.INT) .addNullable("b", MinorType.VARCHAR) .resumeUnion() // Nested single-element list .addList() .addType(MinorType.FLOAT8) .resumeUnion() .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final VectorContainer vc = rowSet.container(); assertEquals(1, vc.getNumberOfColumns()); // List with complex internal structure final ValueVector vector = vc.getValueVector(0).getValueVector(); assertTrue(vector instanceof ListVector); final ListVector list = (ListVector) vector; assertTrue(list.getDataVector() instanceof UnionVector); final UnionVector union = (UnionVector) list.getDataVector(); // Union inside the list final MajorType unionType = union.getField().getType(); final List<MinorType> types = unionType.getSubTypeList(); assertEquals(3, types.size()); assertTrue(types.contains(MinorType.BIGINT)); assertTrue(types.contains(MinorType.MAP)); 
assertTrue(types.contains(MinorType.LIST)); final MapVector typeMap = union.getTypeMap(); ValueVector member = typeMap.getChild(MinorType.BIGINT.name()); assertTrue(member instanceof NullableBigIntVector); // Map inside the list member = typeMap.getChild(MinorType.MAP.name()); assertTrue(member instanceof MapVector); final MapVector childMap = (MapVector) member; ValueVector mapMember = childMap.getChild("a"); assertNotNull(mapMember); assertTrue(mapMember instanceof NullableIntVector); mapMember = childMap.getChild("b"); assertNotNull(mapMember); assertTrue(mapMember instanceof NullableVarCharVector); // Single-type list inside the outer list member = typeMap.getChild(MinorType.LIST.name()); assertTrue(member instanceof ListVector); final ListVector childList = (ListVector) member; assertTrue(childList.getDataVector() instanceof NullableFloat8Vector); rowSet.clear(); } /** * Test a variant (AKA "union vector") at the top level which * includes a map. */ @Test public void testUnionWithMap() { final TupleMetadata schema = new SchemaBuilder() .addUnion("u") .addType(MinorType.VARCHAR) .addMap() .addNullable("a", MinorType.INT) .addNullable("b", MinorType.VARCHAR) .resumeUnion() .resumeSchema() .buildSchema(); SingleRowSet result; // Write values { final ExtendableRowSet rs = fixture.rowSet(schema); final RowSetWriter writer = rs.writer(); // Sanity check of writer structure final ObjectWriter wo = writer.column(0); assertEquals(ObjectType.VARIANT, wo.type()); final VariantWriter vw = wo.variant(); assertTrue(vw.hasType(MinorType.VARCHAR)); final ObjectWriter strObj = vw.member(MinorType.VARCHAR); final ScalarWriter strWriter = strObj.scalar(); assertSame(strWriter, vw.scalar(MinorType.VARCHAR)); assertTrue(vw.hasType(MinorType.MAP)); final ObjectWriter mapObj = vw.member(MinorType.MAP); final TupleWriter mWriter = mapObj.tuple(); assertSame(mWriter, vw.tuple()); final ScalarWriter aWriter = mWriter.scalar("a"); final ScalarWriter bWriter = mWriter.scalar("b"); // 
First row: string "first" vw.setType(MinorType.VARCHAR); strWriter.setString("first"); writer.save(); // Second row: a map vw.setType(MinorType.MAP); aWriter.setInt(20); bWriter.setString("fred"); writer.save(); // Third row: null vw.setNull(); writer.save(); // Fourth row: map with a null string vw.setType(MinorType.MAP); aWriter.setInt(40); bWriter.setNull(); writer.save(); // Fifth row: string "last" vw.setType(MinorType.VARCHAR); strWriter.setString("last"); writer.save(); result = writer.done(); assertEquals(5, result.rowCount()); } // Read the values. { final RowSetReader reader = result.reader(); // Sanity check of structure final ObjectReader ro = reader.column(0); assertEquals(ObjectType.VARIANT, ro.type()); final VariantReader vr = ro.variant(); assertTrue(vr.hasType(MinorType.VARCHAR)); final ObjectReader strObj = vr.member(MinorType.VARCHAR); final ScalarReader strReader = strObj.scalar(); assertSame(strReader, vr.scalar(MinorType.VARCHAR)); assertTrue(vr.hasType(MinorType.MAP)); final ObjectReader mapObj = vr.member(MinorType.MAP); final TupleReader mReader = mapObj.tuple(); assertSame(mReader, vr.tuple()); final ScalarReader aReader = mReader.scalar("a"); final ScalarReader bReader = mReader.scalar("b"); // First row: string "first" assertTrue(reader.next()); assertFalse(vr.isNull()); assertEquals(MinorType.VARCHAR, vr.dataType()); assertFalse(strReader.isNull()); assertTrue(mReader.isNull()); assertEquals("first", strReader.getString()); // Second row: a map assertTrue(reader.next()); assertFalse(vr.isNull()); assertEquals(MinorType.MAP, vr.dataType()); assertTrue(strReader.isNull()); assertFalse(mReader.isNull()); assertFalse(aReader.isNull()); assertEquals(20, aReader.getInt()); assertFalse(bReader.isNull()); assertEquals("fred", bReader.getString()); // Third row: null assertTrue(reader.next()); assertTrue(vr.isNull()); assertTrue(strReader.isNull()); assertTrue(mReader.isNull()); assertTrue(aReader.isNull()); assertTrue(bReader.isNull()); // 
Fourth row: map with a null string assertTrue(reader.next()); assertEquals(MinorType.MAP, vr.dataType()); assertEquals(40, aReader.getInt()); assertTrue(bReader.isNull()); // Fifth row: string "last" assertTrue(reader.next()); assertEquals(MinorType.VARCHAR, vr.dataType()); assertEquals("last", strReader.getString()); assertFalse(reader.next()); } result.clear(); } /** * Test a scalar list. Should act just like a repeated type, with the * addition of allowing the list for a row to be null. But, a list * writer does not do auto-increment, so we must do that explicitly * after each write. */ @Test public void testScalarList() { final TupleMetadata schema = new SchemaBuilder() .addList("list") .addType(MinorType.VARCHAR) .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final RowSetWriter writer = rowSet.writer(); { final ObjectWriter listObj = writer.column(0); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayWriter listArray = listObj.array(); // The list contains only a scalar. But, because lists can, // in general, contain multiple contents, the list requires // an explicit save after each entry. final ObjectWriter itemObj = listArray.entry(); assertEquals(ObjectType.SCALAR, itemObj.type()); final ScalarWriter strWriter = itemObj.scalar(); // First row: two strings and a null // Unlike a repeated type, a list can mark individual elements // as null. // List will automatically detect that data was written. strWriter.setString("fred"); listArray.save(); strWriter.setNull(); listArray.save(); strWriter.setString("wilma"); listArray.save(); writer.save(); // Second row: null writer.save(); // Third row: one string strWriter.setString("dino"); listArray.save(); writer.save(); // Fourth row: empty array. Note that there is no trigger // to say that the column is not null, so we have to do it // explicitly. 
listArray.setNull(false); writer.save(); // Last row: a null string and non-null strWriter.setNull(); listArray.save(); strWriter.setString("pebbles"); listArray.save(); writer.save(); } final SingleRowSet result = writer.done(); assertEquals(5, result.rowCount()); { final RowSetReader reader = result.reader(); final ObjectReader listObj = reader.column(0); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayReader listArray = listObj.array(); // The list is a repeated scalar assertEquals(ObjectType.SCALAR, listArray.entry().type()); final ScalarReader strReader = listArray.scalar(); // First row: two strings and a null assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(3, listArray.size()); assertTrue(listArray.next()); assertFalse(strReader.isNull()); assertEquals("fred", strReader.getString()); assertTrue(listArray.next()); assertTrue(strReader.isNull()); assertTrue(listArray.next()); assertFalse(strReader.isNull()); assertEquals("wilma", strReader.getString()); assertFalse(listArray.next()); // Second row: null assertTrue(reader.next()); assertTrue(listArray.isNull()); assertEquals(0, listArray.size()); // Third row: one string assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(1, listArray.size()); assertTrue(listArray.next()); assertEquals("dino", strReader.getString()); assertFalse(listArray.next()); // Fourth row: empty array. assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(0, listArray.size()); assertFalse(listArray.next()); // Last row: a null string and non-null assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(2, listArray.size()); assertTrue(listArray.next()); assertTrue(strReader.isNull()); assertTrue(listArray.next()); assertFalse(strReader.isNull()); assertEquals("pebbles", strReader.getString()); assertFalse(listArray.next()); assertFalse(reader.next()); } result.clear(); } /** * List of maps. Like a repeated map, but each list entry can be * null. 
*/ @Test public void testListOfMaps() { final TupleMetadata schema = new SchemaBuilder() .addList("list") .addMap() .addNullable("a", MinorType.INT) .addNullable("b", MinorType.VARCHAR) .resumeUnion() .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final RowSetWriter writer = rowSet.writer(); { final ObjectWriter listObj = writer.column("list"); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayWriter listArray = listObj.array(); final ObjectWriter itemObj = listArray.entry(); assertEquals(ObjectType.TUPLE, itemObj.type()); final TupleWriter mapWriter = itemObj.tuple(); final ScalarWriter aWriter = mapWriter.scalar("a"); final ScalarWriter bWriter = mapWriter.scalar("b"); // First row: // {1, "fred"}, null, {3, null} aWriter.setInt(1); bWriter.setString("fred"); listArray.save(); // Can't mark the map as null. Instead, we simply skip // the map and the contained nullable members will automatically // back-fill each entry with a null value. listArray.save(); aWriter.setInt(3); bWriter.setNull(); listArray.save(); writer.save(); // Second row: null writer.save(); // Third row: {null, "dino"} aWriter.setNull(); bWriter.setString("dino"); listArray.save(); writer.save(); // Fourth row: empty array. Note that there is no trigger // to say that the column is not null, so we have to do it // explicitly. 
listArray.setNull(false); writer.save(); // Last row: {4, "pebbles"} aWriter.setInt(4); bWriter.setString("pebbles"); listArray.save(); writer.save(); } final SingleRowSet result = writer.done(); assertEquals(5, result.rowCount()); { final RowSetReader reader = result.reader(); final ObjectReader listObj = reader.column("list"); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayReader listArray = listObj.array(); assertEquals(ObjectType.TUPLE, listArray.entry().type()); final TupleReader mapReader = listArray.tuple(); final ScalarReader aReader = mapReader.scalar("a"); final ScalarReader bReader = mapReader.scalar("b"); // First row: // {1, "fred"}, null, {3, null} assertTrue(reader.next()); assertFalse(listArray.isNull()); assertFalse(mapReader.isNull()); assertEquals(3, listArray.size()); assertTrue(listArray.next()); assertFalse(aReader.isNull()); assertEquals(1, aReader.getInt()); assertFalse(bReader.isNull()); assertEquals("fred", bReader.getString()); assertTrue(listArray.next()); // Awkward: the map has no null state, but its // members do. assertTrue(aReader.isNull()); assertTrue(bReader.isNull()); assertTrue(listArray.next()); assertFalse(aReader.isNull()); assertEquals(3, aReader.getInt()); assertTrue(bReader.isNull()); assertFalse(listArray.next()); // Second row: null assertTrue(reader.next()); assertTrue(listArray.isNull()); assertEquals(0, listArray.size()); // Third row: {null, "dino"} assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(1, listArray.size()); assertTrue(listArray.next()); assertTrue(aReader.isNull()); assertFalse(bReader.isNull()); assertEquals("dino", bReader.getString()); assertFalse(listArray.next()); // Fourth row: empty array. 
assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(0, listArray.size()); assertFalse(listArray.next()); // Last row: {4, "pebbles"} assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(1, listArray.size()); assertTrue(listArray.next()); assertEquals(4, aReader.getInt()); assertEquals("pebbles", bReader.getString()); assertFalse(listArray.next()); assertFalse(reader.next()); } result.clear(); } /** * Test a union list. */ @Test public void testListOfUnions() { final TupleMetadata schema = new SchemaBuilder() .addList("list") .addType(MinorType.INT) .addType(MinorType.VARCHAR) .resumeSchema() .buildSchema(); final ExtendableRowSet rowSet = fixture.rowSet(schema); final RowSetWriter writer = rowSet.writer(); { final ObjectWriter listObj = writer.column(0); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayWriter listArray = listObj.array(); final ObjectWriter itemObj = listArray.entry(); assertEquals(ObjectType.VARIANT, itemObj.type()); final VariantWriter variant = itemObj.variant(); final ScalarWriter intWriter = variant.scalar(MinorType.INT); final ScalarWriter strWriter = variant.scalar(MinorType.VARCHAR); // First row: (1, "two", 3) variant.setType(MinorType.INT); intWriter.setInt(1); listArray.save(); variant.setType(MinorType.VARCHAR); strWriter.setString("two"); listArray.save(); variant.setType(MinorType.INT); intWriter.setInt(3); listArray.save(); writer.save(); // Second row: null writer.save(); // Third row: 4, null, "six", null int, null string variant.setType(MinorType.INT); intWriter.setInt(4); listArray.save(); variant.setNull(); listArray.save(); variant.setType(MinorType.VARCHAR); strWriter.setString("six"); listArray.save(); variant.setType(MinorType.INT); intWriter.setNull(); listArray.save(); variant.setType(MinorType.VARCHAR); intWriter.setNull(); listArray.save(); writer.save(); // Fourth row: empty array. 
listArray.setNull(false); writer.save(); // Fifth row: 9 variant.setType(MinorType.INT); intWriter.setInt(9); listArray.save(); writer.save(); } final SingleRowSet result = writer.done(); assertEquals(5, result.rowCount()); { final RowSetReader reader = result.reader(); final ObjectReader listObj = reader.column(0); assertEquals(ObjectType.ARRAY, listObj.type()); final ArrayReader listArray = listObj.array(); assertEquals(ObjectType.VARIANT, listArray.entry().type()); final VariantReader variant = listArray.variant(); final ScalarReader intReader = variant.scalar(MinorType.INT); final ScalarReader strReader = variant.scalar(MinorType.VARCHAR); // First row: (1, "two", 3) assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(3, listArray.size()); assertTrue(listArray.next()); assertEquals(MinorType.INT, variant.dataType()); assertFalse(intReader.isNull()); assertTrue(strReader.isNull()); assertEquals(1, intReader.getInt()); assertEquals(1, variant.scalar().getInt()); assertTrue(listArray.next()); assertEquals(MinorType.VARCHAR, variant.dataType()); assertTrue(intReader.isNull()); assertFalse(strReader.isNull()); assertEquals("two", strReader.getString()); assertEquals("two", variant.scalar().getString()); assertTrue(listArray.next()); assertEquals(MinorType.INT, variant.dataType()); assertEquals(3, intReader.getInt()); assertFalse(listArray.next()); // Second row: null assertTrue(reader.next()); assertTrue(listArray.isNull()); assertEquals(0, listArray.size()); // Third row: 4, null, "six", null int, null string assertTrue(reader.next()); assertEquals(5, listArray.size()); assertTrue(listArray.next()); assertEquals(4, intReader.getInt()); assertTrue(listArray.next()); assertTrue(variant.isNull()); assertTrue(listArray.next()); assertEquals("six", strReader.getString()); assertTrue(listArray.next()); assertEquals(MinorType.INT, variant.dataType()); assertTrue(intReader.isNull()); assertTrue(listArray.next()); assertEquals(MinorType.VARCHAR, 
variant.dataType()); assertTrue(strReader.isNull()); assertFalse(listArray.next()); // Fourth row: empty array. assertTrue(reader.next()); assertFalse(listArray.isNull()); assertEquals(0, listArray.size()); assertFalse(listArray.next()); // Fifth row: 9 assertTrue(reader.next()); assertEquals(1, listArray.size()); assertTrue(listArray.next()); assertEquals(9, intReader.getInt()); assertFalse(listArray.next()); assertFalse(reader.next()); } result.clear(); } /** * Test a variant (AKA "union vector") at the top level, using * just scalar values. */ @Test public void testAddTypes() { final TupleMetadata batchSchema = new SchemaBuilder() .addNullable("v", MinorType.UNION) .buildSchema(); final ExtendableRowSet rs = fixture.rowSet(batchSchema); final RowSetWriter writer = rs.writer(); // Sanity check of writer structure final ObjectWriter wo = writer.column(0); assertEquals(ObjectType.VARIANT, wo.type()); final VariantWriter vw = wo.variant(); assertSame(vw, writer.variant(0)); assertSame(vw, writer.variant("v")); for (final MinorType type : MinorType.values()) { assertFalse(vw.hasType(type)); } // Write values of different types vw.scalar(MinorType.INT).setInt(10); assertTrue(vw.hasType(MinorType.INT)); assertFalse(vw.hasType(MinorType.VARCHAR)); writer.save(); vw.scalar(MinorType.VARCHAR).setString("fred"); assertTrue(vw.hasType(MinorType.VARCHAR)); writer.save(); vw.setNull(); writer.save(); vw.scalar(MinorType.FLOAT8).setDouble(123.45); assertTrue(vw.hasType(MinorType.INT)); assertTrue(vw.hasType(MinorType.FLOAT8)); writer.save(); final SingleRowSet result = writer.done(); assertEquals(4, result.rowCount()); // Read the values. 
final RowSetReader reader = result.reader(); // Sanity check of structure final ObjectReader ro = reader.column(0); assertEquals(ObjectType.VARIANT, ro.type()); final VariantReader vr = ro.variant(); assertSame(vr, reader.variant(0)); assertSame(vr, reader.variant("v")); for (final MinorType type : MinorType.values()) { if (type == MinorType.INT || type == MinorType.VARCHAR || type == MinorType.FLOAT8) { assertTrue(vr.hasType(type)); } else { assertFalse(vr.hasType(type)); } } // Verify the data assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.INT); assertSame(vr.scalar(MinorType.INT), vr.scalar()); assertNotNull(vr.member()); assertSame(vr.scalar(), vr.member().scalar()); assertEquals(10, vr.scalar().getInt()); assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.VARCHAR); assertSame(vr.scalar(MinorType.VARCHAR), vr.scalar()); assertEquals("fred", vr.scalar().getString()); assertTrue(reader.next()); assertTrue(vr.isNull()); assertNull(vr.dataType()); assertNull(vr.scalar()); assertTrue(reader.next()); assertFalse(vr.isNull()); assertSame(vr.dataType(), MinorType.FLOAT8); assertSame(vr.scalar(MinorType.FLOAT8), vr.scalar()); assertEquals(123.45, vr.scalar().getDouble(), 0.001); assertFalse(reader.next()); result.clear(); } /** * Test a variant (AKA "union vector") at the top level which includes * a list. 
*/ @Test public void testUnionWithList() { final TupleMetadata schema = new SchemaBuilder() .addUnion("u") .addType(MinorType.INT) .addList() .addType(MinorType.VARCHAR) .resumeUnion() .resumeSchema() .buildSchema(); SingleRowSet result; // Write values { final ExtendableRowSet rs = fixture.rowSet(schema); final RowSetWriter writer = rs.writer(); final VariantWriter vw = writer.variant("u"); assertTrue(vw.hasType(MinorType.INT)); final ScalarWriter intWriter = vw.scalar(MinorType.INT); assertTrue(vw.hasType(MinorType.LIST)); final ArrayWriter aWriter = vw.array(); final ScalarWriter strWriter = aWriter.scalar(); // Row 1: 1, ["fred", "barney"] intWriter.setInt(1); strWriter.setString("fred"); aWriter.save(); strWriter.setString("barney"); aWriter.save(); writer.save(); // Row 2, 2, ["wilma", "betty"] intWriter.setInt(2); strWriter.setString("wilma"); aWriter.save(); strWriter.setString("betty"); aWriter.save(); writer.save(); result = writer.done(); assertEquals(2, result.rowCount()); } // Read the values. { final RowSetReader reader = result.reader(); final VariantReader vr = reader.variant("u"); assertTrue(vr.hasType(MinorType.INT)); final ScalarReader intReader = vr.scalar(MinorType.INT); assertTrue(vr.hasType(MinorType.LIST)); final ArrayReader aReader = vr.array(); final ScalarReader strReader = aReader.scalar(); assertTrue(reader.next()); assertEquals(1, intReader.getInt()); assertEquals(2, aReader.size()); assertTrue(aReader.next()); assertEquals("fred", strReader.getString()); assertTrue(aReader.next()); assertEquals("barney", strReader.getString()); assertFalse(aReader.next()); assertTrue(reader.next()); assertEquals(2, intReader.getInt()); assertEquals(2, aReader.size()); assertTrue(aReader.next()); assertEquals("wilma", strReader.getString()); assertTrue(aReader.next()); assertEquals("betty", strReader.getString()); assertFalse(aReader.next()); assertFalse(reader.next()); } result.clear(); } // TODO: Repeated list }
apache/xmlgraphics-batik
35,718
batik-xml/src/main/java/org/apache/batik/xml/XMLCharacters.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.xml; /** * This class contains the definitions of the arrays used to find * the type of the characters found in an XML document. * * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a> * @version $Id$ */ public class XMLCharacters { /** * The bit array representing the first character of an XML name. 
*/ public static final int[] NAME_FIRST_CHARACTER = { 0,67108864,-2013265922,134217726,0,0,-8388609,-8388609,-1,2146697215, -514,2147483647,-1,-1,-8177,-63832065,16777215,0,-65536,-1,-1, -134217217,3,0,0,0,0,0,-10432,-5,1417641983,1048573,-8194,-1, -536936449,-1,-65533,-1,-58977,54513663,0,-131072,41943039,-2,127,0, -65536,460799,0,134217726,2046,-131072,-1,2097151999,3112959,96,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,-32,603979775,-16777216,3,-417824, 63307263,-1342177280,196611,-423968,57540095,1577058304,1835008, -282656,602799615,0,1,-417824,600702463,-1342177280,3,-700594208, 62899992,0,0,-139296,66059775,0,3,-139296,66059775,1073741824,3, -139296,67108351,0,3,0,0,0,0,-2,884735,63,0,-17816170,537750702,31,0, 0,0,-257,1023,0,0,0,0,0,0,0,0,0,-1,-65473,8388607,514797,1342177280, -2110697471,2908843,1073741824,-176109312,7,33622016,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,268435455,-1,-1, 67108863,1061158911,-1,-1426112705,1073741823,-1,1608515583, 265232348,534519807,0,0,0,0,0,0,0,0,0,19520,0,0,7,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,128,1022,-2,-1,2097151,-2,-1,134217727,-32,8191,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, }; /** * The bit array representing the first character of an XML 1.1 name. 
*/ public static final int[] NAME11_FIRST_CHARACTER = { 0,32,2147483617,2147483616,0,0,-257,-257,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,0,0,0,65533,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,786432,0,0,65535,-65536,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-65536,2147483647,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-65536,65535,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-4, }; /** * The bit array representing a character compositing an XML name. */ public static final int[] NAME_CHARACTER = { 0,134176768,-2013265922,134217726,0,8388608,-8388609,-8388609,-1, 2146697215,-514,2147483647,-1,-1,-8177,-63832065,16777215,0,-65536, -1,-1,-134217217,196611,0,-1,-1,63,3,-10304,-5,1417641983,1048573, -8194,-1,-536936449,-1,-65413,-1,-58977,54513663,0,-131072,41943039, -2,-130945,-1140850693,-65514,460799,0,134217726,524287,-64513,-1, 2097151999,-1081345,67059199,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-18, -201326593,-14794753,65487,-417810,-741999105,-1333773921,262095, -423964,-747766273,1577073031,2097088,-282642,-202506753,15295,65473, -417810,-204603905,-1329579633,65475,-700594196,-1010841832,8404423, 65408,-139282,-1007682049,6307295,65475,-139284,-1007682049, 1080049119,65475,-139284,-1006633473,8404431,65475,0,0,0,0,-2, 134184959,67076095,0,-17816170,1006595246,67059551,0,50331648, -1029700609,-257,-130049,-21032993,50216959,0,0,0,0,0,0,0,-1,-65473, 8388607,514797,1342177280,-2110697471,2908843,1073741824,-176109312, 7,33622016,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1, -1,-1,-1,268435455,-1,-1,67108863,1061158911,-1,-1426112705, 1073741823,-1,1608515583,265232348,534519807,0,0,0,0,0,0,536805376,2, 0,19520,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,160,4128766,-2,-1,1713373183, -2,-1,2013265919,-32,8191,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,63,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,15,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0, }; /** * The bit array representing a character compositing an XML 1.1 name. */ public static final int[] NAME11_CHARACTER = { 0,458720,2147483617,2147483616,0,256,-257,-257,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-3,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,786432,1,-2147483648,65535,-65536,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-65536, 2147483647,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-65536,65535,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-4, }; /** * The bit array representing a valid XML character in the unicode range [0-FFFF]. */ public static final int[] XML_CHARACTER = { 9728,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, 
-1,-1,-1,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1, -1,-1,-1,-1,1073741823, }; /** * The bit array representing the valid XML public ID characters. */ public static final int[] PUBLIC_ID_CHARACTER = { 9216,-1342177349,-2013265921,134217726, }; /** * The bit array representing the valid XML version characters. */ public static final int[] VERSION_CHARACTER = { 0,134176768,-2013265922,134217726, }; /** * The bit array representing an alphabetic character. */ public static final int[] ALPHABETIC_CHARACTER = { 0,0,134217726,134217726, }; /** * This class does not need to be instantiated. */ protected XMLCharacters() { } }
googleapis/google-cloud-java
35,025
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/ListBuildsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * Response including listed builds. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.ListBuildsResponse} */ public final class ListBuildsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.ListBuildsResponse) ListBuildsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListBuildsResponse.newBuilder() to construct. 
private ListBuildsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListBuildsResponse() { builds_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListBuildsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ListBuildsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ListBuildsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.ListBuildsResponse.class, com.google.cloudbuild.v1.ListBuildsResponse.Builder.class); } public static final int BUILDS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloudbuild.v1.Build> builds_; /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloudbuild.v1.Build> getBuildsList() { return builds_; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloudbuild.v1.BuildOrBuilder> getBuildsOrBuilderList() { return builds_; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ @java.lang.Override public int getBuildsCount() { return builds_.size(); } /** * * * <pre> * Builds will be sorted by `create_time`, descending. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v1.Build getBuilds(int index) { return builds_.get(index); } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v1.BuildOrBuilder getBuildsOrBuilder(int index) { return builds_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < builds_.size(); i++) { output.writeMessage(1, builds_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < builds_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, builds_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.ListBuildsResponse)) { return super.equals(obj); } com.google.cloudbuild.v1.ListBuildsResponse other = (com.google.cloudbuild.v1.ListBuildsResponse) obj; if (!getBuildsList().equals(other.getBuildsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBuildsCount() > 0) { hash = (37 * hash) + BUILDS_FIELD_NUMBER; hash = (53 * hash) + getBuildsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.ListBuildsResponse 
parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.ListBuildsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ListBuildsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.ListBuildsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.ListBuildsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response including listed builds. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.ListBuildsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.ListBuildsResponse) com.google.cloudbuild.v1.ListBuildsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ListBuildsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ListBuildsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.ListBuildsResponse.class, com.google.cloudbuild.v1.ListBuildsResponse.Builder.class); } // Construct using com.google.cloudbuild.v1.ListBuildsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (buildsBuilder_ == null) { builds_ = java.util.Collections.emptyList(); } else { builds_ = null; buildsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_ListBuildsResponse_descriptor; } @java.lang.Override public com.google.cloudbuild.v1.ListBuildsResponse 
getDefaultInstanceForType() { return com.google.cloudbuild.v1.ListBuildsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.ListBuildsResponse build() { com.google.cloudbuild.v1.ListBuildsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.ListBuildsResponse buildPartial() { com.google.cloudbuild.v1.ListBuildsResponse result = new com.google.cloudbuild.v1.ListBuildsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloudbuild.v1.ListBuildsResponse result) { if (buildsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { builds_ = java.util.Collections.unmodifiableList(builds_); bitField0_ = (bitField0_ & ~0x00000001); } result.builds_ = builds_; } else { result.builds_ = buildsBuilder_.build(); } } private void buildPartial0(com.google.cloudbuild.v1.ListBuildsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.ListBuildsResponse) { return mergeFrom((com.google.cloudbuild.v1.ListBuildsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.ListBuildsResponse other) { if (other == com.google.cloudbuild.v1.ListBuildsResponse.getDefaultInstance()) return this; if (buildsBuilder_ == null) { if (!other.builds_.isEmpty()) { if (builds_.isEmpty()) { builds_ = other.builds_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBuildsIsMutable(); builds_.addAll(other.builds_); } onChanged(); } } else { if (!other.builds_.isEmpty()) { if (buildsBuilder_.isEmpty()) { buildsBuilder_.dispose(); buildsBuilder_ = null; builds_ = other.builds_; bitField0_ = (bitField0_ & ~0x00000001); buildsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getBuildsFieldBuilder() : null; } else { buildsBuilder_.addAllMessages(other.builds_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloudbuild.v1.Build m = input.readMessage(com.google.cloudbuild.v1.Build.parser(), extensionRegistry); if (buildsBuilder_ == null) { ensureBuildsIsMutable(); builds_.add(m); } else { buildsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloudbuild.v1.Build> builds_ = java.util.Collections.emptyList(); private void ensureBuildsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { builds_ = new java.util.ArrayList<com.google.cloudbuild.v1.Build>(builds_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.Build, com.google.cloudbuild.v1.Build.Builder, com.google.cloudbuild.v1.BuildOrBuilder> buildsBuilder_; /** * * * <pre> * Builds will be sorted by `create_time`, descending. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public java.util.List<com.google.cloudbuild.v1.Build> getBuildsList() { if (buildsBuilder_ == null) { return java.util.Collections.unmodifiableList(builds_); } else { return buildsBuilder_.getMessageList(); } } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public int getBuildsCount() { if (buildsBuilder_ == null) { return builds_.size(); } else { return buildsBuilder_.getCount(); } } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public com.google.cloudbuild.v1.Build getBuilds(int index) { if (buildsBuilder_ == null) { return builds_.get(index); } else { return buildsBuilder_.getMessage(index); } } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder setBuilds(int index, com.google.cloudbuild.v1.Build value) { if (buildsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBuildsIsMutable(); builds_.set(index, value); onChanged(); } else { buildsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder setBuilds(int index, com.google.cloudbuild.v1.Build.Builder builderForValue) { if (buildsBuilder_ == null) { ensureBuildsIsMutable(); builds_.set(index, builderForValue.build()); onChanged(); } else { buildsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder addBuilds(com.google.cloudbuild.v1.Build value) { if (buildsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBuildsIsMutable(); builds_.add(value); onChanged(); } else { buildsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder addBuilds(int index, com.google.cloudbuild.v1.Build value) { if (buildsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBuildsIsMutable(); builds_.add(index, value); onChanged(); } else { buildsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder addBuilds(com.google.cloudbuild.v1.Build.Builder builderForValue) { if (buildsBuilder_ == null) { ensureBuildsIsMutable(); builds_.add(builderForValue.build()); onChanged(); } else { buildsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder addBuilds(int index, com.google.cloudbuild.v1.Build.Builder builderForValue) { if (buildsBuilder_ == null) { ensureBuildsIsMutable(); builds_.add(index, builderForValue.build()); onChanged(); } else { buildsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder addAllBuilds( java.lang.Iterable<? 
extends com.google.cloudbuild.v1.Build> values) { if (buildsBuilder_ == null) { ensureBuildsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, builds_); onChanged(); } else { buildsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder clearBuilds() { if (buildsBuilder_ == null) { builds_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { buildsBuilder_.clear(); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public Builder removeBuilds(int index) { if (buildsBuilder_ == null) { ensureBuildsIsMutable(); builds_.remove(index); onChanged(); } else { buildsBuilder_.remove(index); } return this; } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public com.google.cloudbuild.v1.Build.Builder getBuildsBuilder(int index) { return getBuildsFieldBuilder().getBuilder(index); } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public com.google.cloudbuild.v1.BuildOrBuilder getBuildsOrBuilder(int index) { if (buildsBuilder_ == null) { return builds_.get(index); } else { return buildsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public java.util.List<? 
extends com.google.cloudbuild.v1.BuildOrBuilder> getBuildsOrBuilderList() { if (buildsBuilder_ != null) { return buildsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(builds_); } } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public com.google.cloudbuild.v1.Build.Builder addBuildsBuilder() { return getBuildsFieldBuilder() .addBuilder(com.google.cloudbuild.v1.Build.getDefaultInstance()); } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public com.google.cloudbuild.v1.Build.Builder addBuildsBuilder(int index) { return getBuildsFieldBuilder() .addBuilder(index, com.google.cloudbuild.v1.Build.getDefaultInstance()); } /** * * * <pre> * Builds will be sorted by `create_time`, descending. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Build builds = 1;</code> */ public java.util.List<com.google.cloudbuild.v1.Build.Builder> getBuildsBuilderList() { return getBuildsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.Build, com.google.cloudbuild.v1.Build.Builder, com.google.cloudbuild.v1.BuildOrBuilder> getBuildsFieldBuilder() { if (buildsBuilder_ == null) { buildsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v1.Build, com.google.cloudbuild.v1.Build.Builder, com.google.cloudbuild.v1.BuildOrBuilder>( builds_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); builds_ = null; } return buildsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to receive the next page of results. * This will be absent if the end of the response list has been reached. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. 
* @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.ListBuildsResponse) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildsResponse) private static final com.google.cloudbuild.v1.ListBuildsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.ListBuildsResponse(); } public static com.google.cloudbuild.v1.ListBuildsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListBuildsResponse> PARSER = new com.google.protobuf.AbstractParser<ListBuildsResponse>() { @java.lang.Override public ListBuildsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListBuildsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListBuildsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.ListBuildsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,119
java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/CreateDisplayVideo360AdvertiserLinkRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1alpha/analytics_admin.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1alpha; /** * * * <pre> * Request message for CreateDisplayVideo360AdvertiserLink RPC. * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest} */ public final class CreateDisplayVideo360AdvertiserLinkRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) CreateDisplayVideo360AdvertiserLinkRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateDisplayVideo360AdvertiserLinkRequest.newBuilder() to construct. 
private CreateDisplayVideo360AdvertiserLinkRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateDisplayVideo360AdvertiserLinkRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateDisplayVideo360AdvertiserLinkRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateDisplayVideo360AdvertiserLinkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateDisplayVideo360AdvertiserLinkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest.class, com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest.Builder .class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DISPLAY_VIDEO_360_ADVERTISER_LINK_FIELD_NUMBER = 2; private com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink displayVideo360AdvertiserLink_; /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the displayVideo360AdvertiserLink field is set. */ @java.lang.Override public boolean hasDisplayVideo360AdvertiserLink() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The displayVideo360AdvertiserLink. */ @java.lang.Override public com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink getDisplayVideo360AdvertiserLink() { return displayVideo360AdvertiserLink_ == null ? com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.getDefaultInstance() : displayVideo360AdvertiserLink_; } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder getDisplayVideo360AdvertiserLinkOrBuilder() { return displayVideo360AdvertiserLink_ == null ? com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.getDefaultInstance() : displayVideo360AdvertiserLink_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getDisplayVideo360AdvertiserLink()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, getDisplayVideo360AdvertiserLink()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest)) { return super.equals(obj); } com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest other = 
(com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasDisplayVideo360AdvertiserLink() != other.hasDisplayVideo360AdvertiserLink()) return false; if (hasDisplayVideo360AdvertiserLink()) { if (!getDisplayVideo360AdvertiserLink().equals(other.getDisplayVideo360AdvertiserLink())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasDisplayVideo360AdvertiserLink()) { hash = (37 * hash) + DISPLAY_VIDEO_360_ADVERTISER_LINK_FIELD_NUMBER; hash = (53 * hash) + getDisplayVideo360AdvertiserLink().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for CreateDisplayVideo360AdvertiserLink RPC. 
* </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateDisplayVideo360AdvertiserLinkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateDisplayVideo360AdvertiserLinkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest.class, com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest.Builder .class); } // Construct using // com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDisplayVideo360AdvertiserLinkFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; displayVideo360AdvertiserLink_ = null; if (displayVideo360AdvertiserLinkBuilder_ != null) { displayVideo360AdvertiserLinkBuilder_.dispose(); displayVideo360AdvertiserLinkBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateDisplayVideo360AdvertiserLinkRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest .getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest build() { com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest buildPartial() { com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest result = new com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.displayVideo360AdvertiserLink_ = displayVideo360AdvertiserLinkBuilder_ == null ? 
displayVideo360AdvertiserLink_ : displayVideo360AdvertiserLinkBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) { return mergeFrom( (com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest other) { if (other == com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasDisplayVideo360AdvertiserLink()) { mergeDisplayVideo360AdvertiserLink(other.getDisplayVideo360AdvertiserLink()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getDisplayVideo360AdvertiserLinkFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Example format: properties/1234 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink displayVideo360AdvertiserLink_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.Builder, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder> displayVideo360AdvertiserLinkBuilder_; /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the displayVideo360AdvertiserLink field is set. */ public boolean hasDisplayVideo360AdvertiserLink() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The displayVideo360AdvertiserLink. */ public com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink getDisplayVideo360AdvertiserLink() { if (displayVideo360AdvertiserLinkBuilder_ == null) { return displayVideo360AdvertiserLink_ == null ? com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.getDefaultInstance() : displayVideo360AdvertiserLink_; } else { return displayVideo360AdvertiserLinkBuilder_.getMessage(); } } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDisplayVideo360AdvertiserLink( com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink value) { if (displayVideo360AdvertiserLinkBuilder_ == null) { if (value == null) { throw new NullPointerException(); } displayVideo360AdvertiserLink_ = value; } else { displayVideo360AdvertiserLinkBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDisplayVideo360AdvertiserLink( com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.Builder builderForValue) { if (displayVideo360AdvertiserLinkBuilder_ == null) { displayVideo360AdvertiserLink_ = builderForValue.build(); } else { displayVideo360AdvertiserLinkBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDisplayVideo360AdvertiserLink( com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink value) { if (displayVideo360AdvertiserLinkBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && displayVideo360AdvertiserLink_ != null && displayVideo360AdvertiserLink_ != com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink .getDefaultInstance()) { getDisplayVideo360AdvertiserLinkBuilder().mergeFrom(value); } else { displayVideo360AdvertiserLink_ = value; } } else { displayVideo360AdvertiserLinkBuilder_.mergeFrom(value); } if (displayVideo360AdvertiserLink_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDisplayVideo360AdvertiserLink() { bitField0_ = (bitField0_ & ~0x00000002); displayVideo360AdvertiserLink_ = null; if (displayVideo360AdvertiserLinkBuilder_ != null) { displayVideo360AdvertiserLinkBuilder_.dispose(); displayVideo360AdvertiserLinkBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.Builder getDisplayVideo360AdvertiserLinkBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDisplayVideo360AdvertiserLinkFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder getDisplayVideo360AdvertiserLinkOrBuilder() { if (displayVideo360AdvertiserLinkBuilder_ != null) { return displayVideo360AdvertiserLinkBuilder_.getMessageOrBuilder(); } else { return displayVideo360AdvertiserLink_ == null ? com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.getDefaultInstance() : displayVideo360AdvertiserLink_; } } /** * * * <pre> * Required. The DisplayVideo360AdvertiserLink to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.Builder, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder> getDisplayVideo360AdvertiserLinkFieldBuilder() { if (displayVideo360AdvertiserLinkBuilder_ == null) { displayVideo360AdvertiserLinkBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink.Builder, com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder>( getDisplayVideo360AdvertiserLink(), getParentForChildren(), isClean()); displayVideo360AdvertiserLink_ = null; } return displayVideo360AdvertiserLinkBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return 
super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest) private static final com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest(); } public static com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateDisplayVideo360AdvertiserLinkRequest> PARSER = new com.google.protobuf.AbstractParser<CreateDisplayVideo360AdvertiserLinkRequest>() { @java.lang.Override public CreateDisplayVideo360AdvertiserLinkRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateDisplayVideo360AdvertiserLinkRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateDisplayVideo360AdvertiserLinkRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/poi
35,278
poi-scratchpad/src/main/java/org/apache/poi/hemf/record/emf/HemfFill.java
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hemf.record.emf; import static org.apache.poi.hemf.record.emf.HemfDraw.readPointL; import static org.apache.poi.hemf.record.emf.HemfDraw.readRectL; import static org.apache.poi.hemf.record.emf.HemfRecordIterator.HEADER_SIZE; import java.awt.Shape; import java.awt.geom.AffineTransform; import java.awt.geom.Area; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; import org.apache.poi.hemf.draw.HemfDrawProperties; import org.apache.poi.hemf.draw.HemfGraphics; import org.apache.poi.hwmf.draw.HwmfGraphics; import org.apache.poi.hwmf.record.HwmfBitmapDib; import org.apache.poi.hwmf.record.HwmfColorRef; import org.apache.poi.hwmf.record.HwmfDraw; import org.apache.poi.hwmf.record.HwmfFill; import org.apache.poi.hwmf.record.HwmfFill.ColorUsage; import org.apache.poi.hwmf.record.HwmfRegionMode; 
import org.apache.poi.hwmf.record.HwmfTernaryRasterOp; import org.apache.poi.util.GenericRecordJsonWriter; import org.apache.poi.util.GenericRecordUtil; import org.apache.poi.util.IOUtils; import org.apache.poi.util.LittleEndianConsts; import org.apache.poi.util.LittleEndianInputStream; public final class HemfFill { private HemfFill() {} /** * The EMR_SETPOLYFILLMODE record defines polygon fill mode. */ public static class EmfSetPolyfillMode extends HwmfFill.WmfSetPolyfillMode implements HemfRecord { @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.setPolyfillMode; } @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { // A 32-bit unsigned integer that specifies the polygon fill mode and // MUST be in the PolygonFillMode enumeration. polyFillMode = HwmfPolyfillMode.valueOf((int)leis.readUInt()); return LittleEndianConsts.INT_SIZE; } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } public static class EmfExtFloodFill extends HwmfFill.WmfExtFloodFill implements HemfRecord { @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.extFloodFill; } @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { long size = readPointL(leis, start); size += colorRef.init(leis); // A 32-bit unsigned integer that specifies how to use the Color value to determine the area for // the flood fill operation. 
The value MUST be in the FloodFill enumeration mode = HwmfFloodFillMode.values()[(int)leis.readUInt()]; return size + LittleEndianConsts.INT_SIZE; } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } /** * The EMR_STRETCHBLT record specifies a block transfer of pixels from a source bitmap to a destination rectangle, * optionally in combination with a brush pattern, according to a specified raster operation, stretching or * compressing the output to fit the dimensions of the destination, if necessary. */ public static class EmfStretchBlt extends HwmfFill.WmfStretchDib implements HemfRecord { protected final Rectangle2D bounds = new Rectangle2D.Double(); /** An XForm object that specifies a world-space to page-space transform to apply to the source bitmap. */ protected final AffineTransform xFormSrc = new AffineTransform(); /** A WMF ColorRef object that specifies the background color of the source bitmap. */ protected final HwmfColorRef bkColorSrc = new HwmfColorRef(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.stretchBlt; } @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { int startIdx = leis.getReadIndex(); long size = readRectL(leis, bounds); size += readBounds2(leis, this.dstBounds); // A 32-bit unsigned integer that specifies the raster operation code. This code defines how the // color data of the source rectangle is to be combined with the color data of the destination // rectangle and optionally a brush pattern, to achieve the final color. 
int rasterOpIndex = (int)leis.readUInt(); rasterOperation = HwmfTernaryRasterOp.valueOf(rasterOpIndex >>> 16); size += LittleEndianConsts.INT_SIZE; final Point2D srcPnt = new Point2D.Double(); size += readPointL(leis, srcPnt); size += readXForm(leis, xFormSrc); size += bkColorSrc.init(leis); colorUsage = ColorUsage.valueOf((int)leis.readUInt()); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap header in the BitmapBuffer field. final int offBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap header. final int cbBmiSrc = (int)leis.readUInt(); size += 3*LittleEndianConsts.INT_SIZE; if (size >= recordSize) { return size; } // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap bits in the BitmapBuffer field. final int offBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap bits. 
final int cbBitsSrc = (int)leis.readUInt(); size += 2*LittleEndianConsts.INT_SIZE; if (size >= recordSize) { return size; } if (srcEqualsDstDimension()) { srcBounds.setRect(srcPnt.getX(), srcPnt.getY(), dstBounds.getWidth(), dstBounds.getHeight()); } else { int srcWidth = leis.readInt(); int srcHeight = leis.readInt(); size += 2 * LittleEndianConsts.INT_SIZE; srcBounds.setRect(srcPnt.getX(), srcPnt.getY(), srcWidth, srcHeight); } size += readBitmap(leis, bitmap, startIdx, offBmiSrc, cbBmiSrc, offBitsSrc, cbBitsSrc); return size; } protected boolean srcEqualsDstDimension() { return false; } @Override public void draw(HemfGraphics ctx) { HemfDrawProperties prop = ctx.getProperties(); prop.setBackgroundColor(this.bkColorSrc); super.draw(ctx); } @Override public String toString() { return GenericRecordJsonWriter.marshal(this); } public Rectangle2D getBounds() { return bounds; } public AffineTransform getXFormSrc() { return xFormSrc; } public HwmfColorRef getBkColorSrc() { return bkColorSrc; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "base", super::getGenericProperties, "bounds", this::getBounds, "xFormSrc", this::getXFormSrc, "bkColorSrc", this::getBkColorSrc ); } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } /** * The EMR_STRETCHDIBITS record specifies a block transfer of pixels from a source bitmap to a * destination rectangle, optionally in combination with a brush pattern, according to a specified raster * operation, stretching or compressing the output to fit the dimensions of the destination, if necessary. 
*/ public static class EmfStretchDiBits extends HwmfFill.WmfStretchDib implements HemfRecord { protected final Rectangle2D bounds = new Rectangle2D.Double(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.stretchDiBits; } @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { final int startIdx = leis.getReadIndex(); long size = readRectL(leis, bounds); // A 32-bit signed integer that specifies the logical x-coordinate of the upper-left // corner of the destination rectangle. int xDest = leis.readInt(); int yDest = leis.readInt(); size += 2*LittleEndianConsts.INT_SIZE; size += readBounds2(leis, srcBounds); // A 32-bit unsigned integer that specifies the offset, in bytes from the start // of this record to the source bitmap header. int offBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap header. int cbBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap bits. int offBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap bits. int cbBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies how to interpret values in the color table // in the source bitmap header. This value MUST be in the DIBColors enumeration colorUsage = ColorUsage.valueOf(leis.readInt()); // A 32-bit unsigned integer that specifies a raster operation code. // These codes define how the color data of the source rectangle is to be combined with the color data // of the destination rectangle and optionally a brush pattern, to achieve the final color. 
// The value MUST be in the WMF Ternary Raster Operation enumeration int rasterOpIndex = (int)leis.readUInt(); rasterOperation = HwmfTernaryRasterOp.valueOf(rasterOpIndex >>> 16); // A 32-bit signed integer that specifies the logical width of the destination rectangle. int cxDest = leis.readInt(); // A 32-bit signed integer that specifies the logical height of the destination rectangle. int cyDest = leis.readInt(); dstBounds.setRect(xDest, yDest, cxDest, cyDest); size += 8*LittleEndianConsts.INT_SIZE; size += readBitmap(leis, bitmap, startIdx, offBmiSrc, cbBmiSrc, offBitsSrc, cbBitsSrc); return size; } public Rectangle2D getBounds() { return bounds; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "base", super::getGenericProperties, "bounds", this::getBounds ); } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } /** * The EMR_BITBLT record specifies a block transfer of pixels from a source bitmap to a destination rectangle, * optionally in combination with a brush pattern, according to a specified raster operation. */ public static class EmfBitBlt extends EmfStretchBlt { @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.bitBlt; } @Override protected boolean srcEqualsDstDimension() { return false; } } /** The EMR_FRAMERGN record draws a border around the specified region using the specified brush. 
*/ public static class EmfFrameRgn extends HwmfDraw.WmfFrameRegion implements HemfRecord { private final Rectangle2D bounds = new Rectangle2D.Double(); private final List<Rectangle2D> rgnRects = new ArrayList<>(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.frameRgn; } @SuppressWarnings("unused") @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { long size = readRectL(leis, bounds); // A 32-bit unsigned integer that specifies the size of region data, in bytes. long rgnDataSize = leis.readUInt(); // A 32-bit unsigned integer that specifies the brush EMF Object Table index. brushIndex = (int)leis.readUInt(); // A 32-bit signed integer that specifies the width of the vertical brush stroke, in logical units. int width = leis.readInt(); // A 32-bit signed integer that specifies the height of the horizontal brush stroke, in logical units. int height = leis.readInt(); frame.setSize(width,height); size += 4*LittleEndianConsts.INT_SIZE; size += readRgnData(leis, rgnRects); return size; } @Override public void draw(HwmfGraphics ctx) { ctx.applyObjectTableEntry(brushIndex); ctx.fill(getShape()); } protected Shape getShape() { return getRgnShape(rgnRects); } public Rectangle2D getBounds() { return bounds; } public List<Rectangle2D> getRgnRects() { return rgnRects; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "base", super::getGenericProperties, "bounds", this::getBounds, "rgnRects", this::getRgnRects ); } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } /** The EMR_INVERTRGN record inverts the colors in the specified region. 
*/ public static class EmfInvertRgn implements HemfRecord { protected final Rectangle2D bounds = new Rectangle2D.Double(); protected final List<Rectangle2D> rgnRects = new ArrayList<>(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.invertRgn; } @SuppressWarnings("unused") @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { long size = readRectL(leis, bounds); // A 32-bit unsigned integer that specifies the size of region data, in bytes. long rgnDataSize = leis.readUInt(); size += LittleEndianConsts.INT_SIZE; size += readRgnData(leis, rgnRects); return size; } protected Shape getShape() { return getRgnShape(rgnRects); } public Rectangle2D getBounds() { return bounds; } public List<Rectangle2D> getRgnRects() { return rgnRects; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "bounds", this::getBounds, "rgnRects", this::getRgnRects ); } } /** * The EMR_PAINTRGN record paints the specified region by using the brush currently selected into the * playback device context. */ public static class EmfPaintRgn extends EmfInvertRgn { @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.paintRgn; } } /** The EMR_FILLRGN record fills the specified region by using the specified brush. */ public static class EmfFillRgn extends HwmfFill.WmfFillRegion implements HemfRecord { protected final Rectangle2D bounds = new Rectangle2D.Double(); protected final List<Rectangle2D> rgnRects = new ArrayList<>(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.fillRgn; } @SuppressWarnings("unused") @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { long size = readRectL(leis, bounds); // A 32-bit unsigned integer that specifies the size of region data, in bytes. 
long rgnDataSize = leis.readUInt(); brushIndex = (int)leis.readUInt(); size += 2*LittleEndianConsts.INT_SIZE; size += readRgnData(leis, rgnRects); return size; } protected Shape getShape() { return getRgnShape(rgnRects); } public Rectangle2D getBounds() { return bounds; } public List<Rectangle2D> getRgnRects() { return rgnRects; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "base", super::getGenericProperties, "bounds", this::getBounds, "rgnRects", this::getRgnRects ); } @Override public HemfRecordType getGenericRecordType() { return getEmfRecordType(); } } public static class EmfExtSelectClipRgn implements HemfRecord { protected HwmfRegionMode regionMode; protected final List<Rectangle2D> rgnRects = new ArrayList<>(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.extSelectClipRgn; } @SuppressWarnings("unused") @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { // A 32-bit unsigned integer that specifies the size of region data in bytes long rgnDataSize = leis.readUInt(); // A 32-bit unsigned integer that specifies the way to use the region. regionMode = HwmfRegionMode.valueOf((int)leis.readUInt()); long size = 2L * LittleEndianConsts.INT_SIZE; // If RegionMode is RGN_COPY, this data can be omitted and the clip region // SHOULD be set to the default (NULL) clip region. 
if (regionMode != HwmfRegionMode.RGN_COPY) { size += readRgnData(leis, rgnRects); } return size; } protected Shape getShape() { return getRgnShape(rgnRects); } @Override public void draw(HemfGraphics ctx) { ctx.setClip(getShape(), regionMode, true); } @Override public String toString() { return GenericRecordJsonWriter.marshal(this); } public HwmfRegionMode getRegionMode() { return regionMode; } public List<Rectangle2D> getRgnRects() { return rgnRects; } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "regionMode", this::getRegionMode, "rgnRects", this::getRgnRects ); } } public static class EmfAlphaBlend implements HemfRecord { /** the destination bounding rectangle in device units */ protected final Rectangle2D bounds = new Rectangle2D.Double(); /** the destination rectangle */ protected final Rectangle2D destRect = new Rectangle2D.Double(); /** the source rectangle */ protected final Rectangle2D srcRect = new Rectangle2D.Double(); /** * The blend operation code. The only source and destination blend operation that has been defined * is 0x00, which specifies that the source bitmap MUST be combined with the destination bitmap based * on the alpha transparency values of the source pixels. */ protected byte blendOperation; /** This value MUST be 0x00 and MUST be ignored. */ protected byte blendFlags; /** * An 8-bit unsigned integer that specifies alpha transparency, which determines the blend of the source * and destination bitmaps. This value MUST be used on the entire source bitmap. The minimum alpha * transparency value, zero, corresponds to completely transparent; the maximum value, 0xFF, corresponds * to completely opaque. In effect, a value of 0xFF specifies that the per-pixel alpha values determine * the blend of the source and destination bitmaps. */ protected int srcConstantAlpha; /** * A byte that specifies how source and destination pixels are interpreted with respect to alpha transparency. 
* * 0x00: * The pixels in the source bitmap do not specify alpha transparency. * In this case, the SrcConstantAlpha value determines the blend of the source and destination bitmaps. * Note that in the following equations SrcConstantAlpha is divided by 255, * which produces a value in the range 0 to 1. * * 0x01: "AC_SRC_ALPHA" * Indicates that the source bitmap is 32 bits-per-pixel and specifies an alpha transparency value * for each pixel. */ protected byte alphaFormat; /** a world-space to page-space transform to apply to the source bitmap. */ protected final AffineTransform xFormSrc = new AffineTransform(); /** the background color of the source bitmap. */ protected final HwmfColorRef bkColorSrc = new HwmfColorRef(); /** * A 32-bit unsigned integer that specifies how to interpret values in the * color table in the source bitmap header. */ protected ColorUsage usageSrc; protected final HwmfBitmapDib bitmap = new HwmfBitmapDib(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.alphaBlend; } @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { final int startIdx = leis.getReadIndex(); long size = readRectL(leis, bounds); size += readBounds2(leis, destRect); blendOperation = leis.readByte(); assert (blendOperation == 0); blendFlags = leis.readByte(); assert (blendOperation == 0); srcConstantAlpha = leis.readUByte(); alphaFormat = leis.readByte(); // A 32-bit signed integer that specifies the logical x-coordinate of the upper-left // corner of the source rectangle. final int xSrc = leis.readInt(); // A 32-bit signed integer that specifies the logical y-coordinate of the upper-left // corner of the source rectangle. 
final int ySrc = leis.readInt(); size += 3*LittleEndianConsts.INT_SIZE; size += readXForm(leis, xFormSrc); size += bkColorSrc.init(leis); usageSrc = ColorUsage.valueOf((int)leis.readUInt()); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap header in the BitmapBuffer field. final int offBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap header. final int cbBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap bits in the BitmapBuffer field. final int offBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap bits. final int cbBitsSrc = (int)leis.readUInt(); // A 32-bit signed integer that specifies the logical width of the source rectangle. // This value MUST be greater than zero. final int cxSrc = leis.readInt(); // A 32-bit signed integer that specifies the logical height of the source rectangle. // This value MUST be greater than zero. 
final int cySrc = leis.readInt(); srcRect.setRect(xSrc, ySrc, cxSrc, cySrc); size += 7 * LittleEndianConsts.INT_SIZE; size += readBitmap(leis, bitmap, startIdx, offBmiSrc, cbBmiSrc, offBitsSrc, cbBitsSrc); return size; } @Override public Map<String, Supplier<?>> getGenericProperties() { final Map<String,Supplier<?>> m = new LinkedHashMap<>(); m.put("bounds", () -> bounds); m.put("destRect", () -> destRect); m.put("srcRect", () -> srcRect); m.put("blendOperation", () -> blendOperation); m.put("blendFlags", () -> blendFlags); m.put("srcConstantAlpha", () -> srcConstantAlpha); m.put("alphaFormat", () -> alphaFormat); m.put("xFormSrc", () -> xFormSrc); m.put("bkColorSrc", () -> bkColorSrc); m.put("usageSrc", () -> usageSrc); m.put("bitmap", () -> bitmap); return Collections.unmodifiableMap(m); } } /** * The EMR_SETDIBITSTODEVICE record specifies a block transfer of pixels from specified scanlines of * a source bitmap to a destination rectangle. */ public static class EmfSetDiBitsToDevice implements HemfRecord { protected final Rectangle2D bounds = new Rectangle2D.Double(); protected final Point2D dest = new Point2D.Double(); protected final Rectangle2D src = new Rectangle2D.Double(); protected ColorUsage usageSrc; protected final HwmfBitmapDib bitmap = new HwmfBitmapDib(); @Override public HemfRecordType getEmfRecordType() { return HemfRecordType.setDiBitsToDevice; } @SuppressWarnings("unused") @Override public long init(LittleEndianInputStream leis, long recordSize, long recordId) throws IOException { int startIdx = leis.getReadIndex(); // A WMF RectL object that defines the destination bounding rectangle in device units. long size = readRectL(leis, bounds); // the logical x/y-coordinate of the upper-left corner of the destination rectangle. 
size += readPointL(leis, dest); // the source rectangle size += readBounds2(leis, src); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap header in the BitmapBuffer field. final int offBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap header. final int cbBmiSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the offset, in bytes, from the // start of this record to the source bitmap bits in the BitmapBuffer field. final int offBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the size, in bytes, of the source bitmap bits. final int cbBitsSrc = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies how to interpret values in the color table // in the source bitmap header. This value MUST be in the DIBColors enumeration usageSrc = ColorUsage.valueOf((int)leis.readUInt()); // A 32-bit unsigned integer that specifies the first scan line in the array. final int iStartScan = (int)leis.readUInt(); // A 32-bit unsigned integer that specifies the number of scan lines. 
final int cScans = (int)leis.readUInt(); size += 7*LittleEndianConsts.INT_SIZE; size += readBitmap(leis, bitmap, startIdx, offBmiSrc, cbBmiSrc, offBitsSrc, cbBitsSrc); return size; } public Rectangle2D getBounds() { return bounds; } public Point2D getDest() { return dest; } public Rectangle2D getSrc() { return src; } public ColorUsage getUsageSrc() { return usageSrc; } public HwmfBitmapDib getBitmap() { return bitmap; } @Override public String toString() { return GenericRecordJsonWriter.marshal(this); } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties( "bounds", this::getBounds, "dest", this::getDest, "src", this::getSrc, "usageSrc", this::getUsageSrc, "bitmap", this::getBitmap ); } } static long readBitmap(final LittleEndianInputStream leis, final HwmfBitmapDib bitmap, final int startIdx, final int offBmi, final int cbBmi, final int offBits, int cbBits) throws IOException { if (offBmi == 0) { return 0; } final int offCurr = leis.getReadIndex()-(startIdx-HEADER_SIZE); final int undefinedSpace1 = offBmi-offCurr; if (undefinedSpace1 < 0) { return 0; } final int undefinedSpace2 = offBits-offCurr-cbBmi-undefinedSpace1; assert(undefinedSpace2 >= 0); leis.skipFully(undefinedSpace1); if (cbBmi == 0 || cbBits == 0) { return undefinedSpace1; } final int dibSize = cbBmi+cbBits; if (undefinedSpace2 == 0) { return (long)undefinedSpace1 + bitmap.init(leis, dibSize); } final UnsynchronizedByteArrayOutputStream bos = UnsynchronizedByteArrayOutputStream.builder().setBufferSize(cbBmi+cbBits).get(); final long cbBmiSrcAct = IOUtils.copy(leis, bos, cbBmi); assert (cbBmiSrcAct == cbBmi); leis.skipFully(undefinedSpace2); final long cbBitsSrcAct = IOUtils.copy(leis, bos, cbBits); assert (cbBitsSrcAct == cbBits); final LittleEndianInputStream leisDib = new LittleEndianInputStream(bos.toInputStream()); final int dibSizeAct = bitmap.init(leisDib, dibSize); assert (dibSizeAct <= dibSize); return (long)undefinedSpace1 + cbBmi + 
undefinedSpace2 + cbBits; } @SuppressWarnings("unused") static long readRgnData(final LittleEndianInputStream leis, final List<Rectangle2D> rgnRects) { // *** RegionDataHeader *** // A 32-bit unsigned integer that specifies the size of this object in bytes. This MUST be 0x00000020. long rgnHdrSize = leis.readUInt(); assert(rgnHdrSize == 0x20); // A 32-bit unsigned integer that specifies the region type. This SHOULD be RDH_RECTANGLES (0x00000001) long rgnHdrType = leis.readUInt(); assert(rgnHdrType == 1); // A 32-bit unsigned integer that specifies the number of rectangles in this region. long rgnCntRect = leis.readUInt(); // A 32-bit unsigned integer that specifies the size of the buffer of rectangles in bytes. long rgnCntBytes = leis.readUInt(); long size = 4L*LittleEndianConsts.INT_SIZE; // A 128-bit WMF RectL object, which specifies the bounds of the region. Rectangle2D rgnBounds = new Rectangle2D.Double(); size += readRectL(leis, rgnBounds); for (int i=0; i<rgnCntRect; i++) { Rectangle2D rgnRct = new Rectangle2D.Double(); size += readRectL(leis, rgnRct); rgnRects.add(rgnRct); } return size; } static int readBounds2(LittleEndianInputStream leis, Rectangle2D bounds) { // The 32-bit signed integers that defines the corners of the bounding rectangle. int x = leis.readInt(); int y = leis.readInt(); int w = leis.readInt(); int h = leis.readInt(); bounds.setRect(x, y, w, h); return 4 * LittleEndianConsts.INT_SIZE; } public static int readXForm(LittleEndianInputStream leis, AffineTransform xform) { // mapping <java AffineTransform> = <xform> // m00 (scaleX) = eM11 (Horizontal scaling component) double m00 = leis.readFloat(); // m01 (shearX) = eM12 (Horizontal proportionality constant) double m01 = leis.readFloat(); // m10 (shearY) = eM21 (Vertical proportionality constant) double m10 = leis.readFloat(); // m11 (scaleY) = eM22 (Vertical scaling component) double m11 = leis.readFloat(); // m02 (translateX) = eDx (The horizontal translation component, in logical units.) 
double m02 = leis.readFloat(); // m12 (translateY) = eDy (The vertical translation component, in logical units.) double m12 = leis.readFloat(); // TODO: not sure, why the shearing has to be inverted here, // probably because of the different world/user space transformation xform.setTransform(m00, -m10, -m01, m11, m02, m12); if (xform.isIdentity()) { xform.setToIdentity(); } return 6 * LittleEndianConsts.INT_SIZE; } static Shape getRgnShape(List<Rectangle2D> rgnRects) { if (rgnRects.size() == 1) { return rgnRects.get(0); } final Area frame = new Area(); rgnRects.forEach((rct) -> frame.add(new Area(rct))); return frame; } }
googleapis/google-cloud-java
35,040
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/InspectionRule.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto

// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;

// NOTE(review): protoc-generated message class. Only comments were touched in this pass;
// every executable token is left exactly as emitted so the file stays in sync with
// regeneration. The message is a single oneof named `type` with two members:
// hotword_rule (field 1) and exclusion_rule (field 2).
/**
 * <pre>
 * A single inspection rule to be applied to infoTypes, specified in
 * `InspectionRuleSet`.
 * </pre>
 *
 * Protobuf type {@code google.privacy.dlp.v2.InspectionRule}
 */
public final class InspectionRule extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.InspectionRule)
    InspectionRuleOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use InspectionRule.newBuilder() to construct.
  private InspectionRule(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private InspectionRule() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new InspectionRule();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_InspectionRule_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_InspectionRule_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.InspectionRule.class,
            com.google.privacy.dlp.v2.InspectionRule.Builder.class);
  }

  // Discriminator of the `type` oneof: 0 = unset, 1 = hotword_rule, 2 = exclusion_rule.
  private int typeCase_ = 0;

  // Holds whichever oneof member is set; runtime type is keyed off typeCase_.
  @SuppressWarnings("serial")
  private java.lang.Object type_;

  /** Identifies which member of the {@code type} oneof is populated. */
  public enum TypeCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    HOTWORD_RULE(1),
    EXCLUSION_RULE(2),
    TYPE_NOT_SET(0);
    private final int value;

    private TypeCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static TypeCase valueOf(int value) {
      return forNumber(value);
    }

    public static TypeCase forNumber(int value) {
      switch (value) {
        case 1:
          return HOTWORD_RULE;
        case 2:
          return EXCLUSION_RULE;
        case 0:
          return TYPE_NOT_SET;
        default:
          // unknown field number
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public TypeCase getTypeCase() {
    return TypeCase.forNumber(typeCase_);
  }

  public static final int HOTWORD_RULE_FIELD_NUMBER = 1;

  /**
   * Hotword-based detection rule.
   *
   * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;</code>
   *
   * @return Whether the hotwordRule field is set.
   */
  @java.lang.Override
  public boolean hasHotwordRule() {
    return typeCase_ == 1;
  }

  /**
   * Hotword-based detection rule.
   *
   * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;</code>
   *
   * @return The hotwordRule (the default instance when the oneof holds a different member).
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule getHotwordRule() {
    if (typeCase_ == 1) {
      return (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_;
    }
    return com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.getDefaultInstance();
  }

  /**
   * Hotword-based detection rule.
   *
   * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleOrBuilder
      getHotwordRuleOrBuilder() {
    if (typeCase_ == 1) {
      return (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_;
    }
    return com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.getDefaultInstance();
  }

  public static final int EXCLUSION_RULE_FIELD_NUMBER = 2;

  /**
   * Exclusion rule.
   *
   * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
   *
   * @return Whether the exclusionRule field is set.
   */
  @java.lang.Override
  public boolean hasExclusionRule() {
    return typeCase_ == 2;
  }

  /**
   * Exclusion rule.
   *
   * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
   *
   * @return The exclusionRule (the default instance when the oneof holds a different member).
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.ExclusionRule getExclusionRule() {
    if (typeCase_ == 2) {
      return (com.google.privacy.dlp.v2.ExclusionRule) type_;
    }
    return com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
  }

  /**
   * Exclusion rule.
   *
   * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.ExclusionRuleOrBuilder getExclusionRuleOrBuilder() {
    if (typeCase_ == 2) {
      return (com.google.privacy.dlp.v2.ExclusionRule) type_;
    }
    return com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
  }

  // Tri-state cache for isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (typeCase_ == 1) {
      output.writeMessage(
          1, (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_);
    }
    if (typeCase_ == 2) {
      output.writeMessage(2, (com.google.privacy.dlp.v2.ExclusionRule) type_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (typeCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_);
    }
    if (typeCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.privacy.dlp.v2.ExclusionRule) type_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.InspectionRule)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.InspectionRule other = (com.google.privacy.dlp.v2.InspectionRule) obj;

    if (!getTypeCase().equals(other.getTypeCase())) return false;
    switch (typeCase_) {
      case 1:
        if (!getHotwordRule().equals(other.getHotwordRule())) return false;
        break;
      case 2:
        if (!getExclusionRule().equals(other.getExclusionRule())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (typeCase_) {
      case 1:
        hash = (37 * hash) + HOTWORD_RULE_FIELD_NUMBER;
        hash = (53 * hash) + getHotwordRule().hashCode();
        break;
      case 2:
        hash = (37 * hash) + EXCLUSION_RULE_FIELD_NUMBER;
        hash = (53 * hash) + getExclusionRule().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.privacy.dlp.v2.InspectionRule parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.privacy.dlp.v2.InspectionRule prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * A single inspection rule to be applied to infoTypes, specified in
   * `InspectionRuleSet`.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.InspectionRule}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.InspectionRule)
      com.google.privacy.dlp.v2.InspectionRuleOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_InspectionRule_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_InspectionRule_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.InspectionRule.class,
              com.google.privacy.dlp.v2.InspectionRule.Builder.class);
    }

    // Construct using com.google.privacy.dlp.v2.InspectionRule.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (hotwordRuleBuilder_ != null) {
        hotwordRuleBuilder_.clear();
      }
      if (exclusionRuleBuilder_ != null) {
        exclusionRuleBuilder_.clear();
      }
      typeCase_ = 0;
      type_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_InspectionRule_descriptor;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.InspectionRule getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.InspectionRule.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.InspectionRule build() {
      com.google.privacy.dlp.v2.InspectionRule result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.privacy.dlp.v2.InspectionRule buildPartial() {
      com.google.privacy.dlp.v2.InspectionRule result =
          new com.google.privacy.dlp.v2.InspectionRule(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // No non-oneof fields in this message; generated as an (empty-effect) placeholder.
    private void buildPartial0(com.google.privacy.dlp.v2.InspectionRule result) {
      int from_bitField0_ = bitField0_;
    }

    private void buildPartialOneofs(com.google.privacy.dlp.v2.InspectionRule result) {
      result.typeCase_ = typeCase_;
      result.type_ = this.type_;
      if (typeCase_ == 1 && hotwordRuleBuilder_ != null) {
        result.type_ = hotwordRuleBuilder_.build();
      }
      if (typeCase_ == 2 && exclusionRuleBuilder_ != null) {
        result.type_ = exclusionRuleBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.InspectionRule) {
        return mergeFrom((com.google.privacy.dlp.v2.InspectionRule) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.privacy.dlp.v2.InspectionRule other) {
      if (other == com.google.privacy.dlp.v2.InspectionRule.getDefaultInstance()) return this;
      switch (other.getTypeCase()) {
        case HOTWORD_RULE:
          {
            mergeHotwordRule(other.getHotwordRule());
            break;
          }
        case EXCLUSION_RULE:
          {
            mergeExclusionRule(other.getExclusionRule());
            break;
          }
        case TYPE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getHotwordRuleFieldBuilder().getBuilder(), extensionRegistry);
                typeCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getExclusionRuleFieldBuilder().getBuilder(), extensionRegistry);
                typeCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int typeCase_ = 0;
    private java.lang.Object type_;

    public TypeCase getTypeCase() {
      return TypeCase.forNumber(typeCase_);
    }

    public Builder clearType() {
      typeCase_ = 0;
      type_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule,
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.Builder,
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleOrBuilder>
        hotwordRuleBuilder_;

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     *
     * @return Whether the hotwordRule field is set.
     */
    @java.lang.Override
    public boolean hasHotwordRule() {
      return typeCase_ == 1;
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     *
     * @return The hotwordRule.
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule getHotwordRule() {
      if (hotwordRuleBuilder_ == null) {
        if (typeCase_ == 1) {
          return (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_;
        }
        return com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule
            .getDefaultInstance();
      } else {
        if (typeCase_ == 1) {
          return hotwordRuleBuilder_.getMessage();
        }
        return com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule
            .getDefaultInstance();
      }
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    public Builder setHotwordRule(
        com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule value) {
      if (hotwordRuleBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        type_ = value;
        onChanged();
      } else {
        hotwordRuleBuilder_.setMessage(value);
      }
      typeCase_ = 1;
      return this;
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    public Builder setHotwordRule(
        com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.Builder
            builderForValue) {
      if (hotwordRuleBuilder_ == null) {
        type_ = builderForValue.build();
        onChanged();
      } else {
        hotwordRuleBuilder_.setMessage(builderForValue.build());
      }
      typeCase_ = 1;
      return this;
    }

    /**
     * Hotword-based detection rule. Merges into the existing value when this oneof
     * member is already set; otherwise replaces the oneof contents.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    public Builder mergeHotwordRule(
        com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule value) {
      if (hotwordRuleBuilder_ == null) {
        if (typeCase_ == 1
            && type_
                != com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule
                    .getDefaultInstance()) {
          type_ =
              com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.newBuilder(
                      (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          type_ = value;
        }
        onChanged();
      } else {
        if (typeCase_ == 1) {
          hotwordRuleBuilder_.mergeFrom(value);
        } else {
          hotwordRuleBuilder_.setMessage(value);
        }
      }
      typeCase_ = 1;
      return this;
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    public Builder clearHotwordRule() {
      if (hotwordRuleBuilder_ == null) {
        if (typeCase_ == 1) {
          typeCase_ = 0;
          type_ = null;
          onChanged();
        }
      } else {
        if (typeCase_ == 1) {
          typeCase_ = 0;
          type_ = null;
        }
        hotwordRuleBuilder_.clear();
      }
      return this;
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.Builder
        getHotwordRuleBuilder() {
      return getHotwordRuleFieldBuilder().getBuilder();
    }

    /**
     * Hotword-based detection rule.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleOrBuilder
        getHotwordRuleOrBuilder() {
      if ((typeCase_ == 1) && (hotwordRuleBuilder_ != null)) {
        return hotwordRuleBuilder_.getMessageOrBuilder();
      } else {
        if (typeCase_ == 1) {
          return (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_;
        }
        return com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule
            .getDefaultInstance();
      }
    }

    /**
     * Lazily creates the nested-builder support for hotword_rule; marks the oneof as
     * holding this member.
     *
     * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule hotword_rule = 1;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule,
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.Builder,
            com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleOrBuilder>
        getHotwordRuleFieldBuilder() {
      if (hotwordRuleBuilder_ == null) {
        if (!(typeCase_ == 1)) {
          type_ =
              com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule
                  .getDefaultInstance();
        }
        hotwordRuleBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule,
                com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.Builder,
                com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleOrBuilder>(
                (com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) type_,
                getParentForChildren(),
                isClean());
        type_ = null;
      }
      typeCase_ = 1;
      onChanged();
      return hotwordRuleBuilder_;
    }

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.ExclusionRule,
            com.google.privacy.dlp.v2.ExclusionRule.Builder,
            com.google.privacy.dlp.v2.ExclusionRuleOrBuilder>
        exclusionRuleBuilder_;

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     *
     * @return Whether the exclusionRule field is set.
     */
    @java.lang.Override
    public boolean hasExclusionRule() {
      return typeCase_ == 2;
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     *
     * @return The exclusionRule.
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.ExclusionRule getExclusionRule() {
      if (exclusionRuleBuilder_ == null) {
        if (typeCase_ == 2) {
          return (com.google.privacy.dlp.v2.ExclusionRule) type_;
        }
        return com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
      } else {
        if (typeCase_ == 2) {
          return exclusionRuleBuilder_.getMessage();
        }
        return com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
      }
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    public Builder setExclusionRule(com.google.privacy.dlp.v2.ExclusionRule value) {
      if (exclusionRuleBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        type_ = value;
        onChanged();
      } else {
        exclusionRuleBuilder_.setMessage(value);
      }
      typeCase_ = 2;
      return this;
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    public Builder setExclusionRule(
        com.google.privacy.dlp.v2.ExclusionRule.Builder builderForValue) {
      if (exclusionRuleBuilder_ == null) {
        type_ = builderForValue.build();
        onChanged();
      } else {
        exclusionRuleBuilder_.setMessage(builderForValue.build());
      }
      typeCase_ = 2;
      return this;
    }

    /**
     * Exclusion rule. Merges into the existing value when this oneof member is already
     * set; otherwise replaces the oneof contents.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    public Builder mergeExclusionRule(com.google.privacy.dlp.v2.ExclusionRule value) {
      if (exclusionRuleBuilder_ == null) {
        if (typeCase_ == 2
            && type_ != com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance()) {
          type_ =
              com.google.privacy.dlp.v2.ExclusionRule.newBuilder(
                      (com.google.privacy.dlp.v2.ExclusionRule) type_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          type_ = value;
        }
        onChanged();
      } else {
        if (typeCase_ == 2) {
          exclusionRuleBuilder_.mergeFrom(value);
        } else {
          exclusionRuleBuilder_.setMessage(value);
        }
      }
      typeCase_ = 2;
      return this;
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    public Builder clearExclusionRule() {
      if (exclusionRuleBuilder_ == null) {
        if (typeCase_ == 2) {
          typeCase_ = 0;
          type_ = null;
          onChanged();
        }
      } else {
        if (typeCase_ == 2) {
          typeCase_ = 0;
          type_ = null;
        }
        exclusionRuleBuilder_.clear();
      }
      return this;
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    public com.google.privacy.dlp.v2.ExclusionRule.Builder getExclusionRuleBuilder() {
      return getExclusionRuleFieldBuilder().getBuilder();
    }

    /**
     * Exclusion rule.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.ExclusionRuleOrBuilder getExclusionRuleOrBuilder() {
      if ((typeCase_ == 2) && (exclusionRuleBuilder_ != null)) {
        return exclusionRuleBuilder_.getMessageOrBuilder();
      } else {
        if (typeCase_ == 2) {
          return (com.google.privacy.dlp.v2.ExclusionRule) type_;
        }
        return com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
      }
    }

    /**
     * Lazily creates the nested-builder support for exclusion_rule; marks the oneof as
     * holding this member.
     *
     * <code>.google.privacy.dlp.v2.ExclusionRule exclusion_rule = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.ExclusionRule,
            com.google.privacy.dlp.v2.ExclusionRule.Builder,
            com.google.privacy.dlp.v2.ExclusionRuleOrBuilder>
        getExclusionRuleFieldBuilder() {
      if (exclusionRuleBuilder_ == null) {
        if (!(typeCase_ == 2)) {
          type_ = com.google.privacy.dlp.v2.ExclusionRule.getDefaultInstance();
        }
        exclusionRuleBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.ExclusionRule,
                com.google.privacy.dlp.v2.ExclusionRule.Builder,
                com.google.privacy.dlp.v2.ExclusionRuleOrBuilder>(
                (com.google.privacy.dlp.v2.ExclusionRule) type_, getParentForChildren(), isClean());
        type_ = null;
      }
      typeCase_ = 2;
      onChanged();
      return exclusionRuleBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.InspectionRule)
  }

  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InspectionRule)
  private static final com.google.privacy.dlp.v2.InspectionRule DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.InspectionRule();
  }

  public static com.google.privacy.dlp.v2.InspectionRule getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<InspectionRule> PARSER =
      new com.google.protobuf.AbstractParser<InspectionRule>() {
        @java.lang.Override
        public InspectionRule parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<InspectionRule> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<InspectionRule> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.privacy.dlp.v2.InspectionRule getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
35,140
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ImportRagFilesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/vertex_rag_data_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ImportRagFilesRequest} */ public final class ImportRagFilesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) ImportRagFilesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ImportRagFilesRequest.newBuilder() to construct. 
private ImportRagFilesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportRagFilesRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportRagFilesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ImportRagFilesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ImportRagFilesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.class, com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. 
* Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int IMPORT_RAG_FILES_CONFIG_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig importRagFilesConfig_; /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the importRagFilesConfig field is set. */ @java.lang.Override public boolean hasImportRagFilesConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The importRagFilesConfig. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig getImportRagFilesConfig() { return importRagFilesConfig_ == null ? com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.getDefaultInstance() : importRagFilesConfig_; } /** * * * <pre> * Required. 
The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfigOrBuilder getImportRagFilesConfigOrBuilder() { return importRagFilesConfig_ == null ? com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.getDefaultInstance() : importRagFilesConfig_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getImportRagFilesConfig()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getImportRagFilesConfig()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest)) { return super.equals(obj); } 
com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest other = (com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasImportRagFilesConfig() != other.hasImportRagFilesConfig()) return false; if (hasImportRagFilesConfig()) { if (!getImportRagFilesConfig().equals(other.getImportRagFilesConfig())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasImportRagFilesConfig()) { hash = (37 * hash) + IMPORT_RAG_FILES_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getImportRagFilesConfig().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ImportRagFilesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ImportRagFilesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ImportRagFilesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.class, com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.Builder.class); } // Construct using 
com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getImportRagFilesConfigFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; importRagFilesConfig_ = null; if (importRagFilesConfigBuilder_ != null) { importRagFilesConfigBuilder_.dispose(); importRagFilesConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ImportRagFilesRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest build() { com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest result = new com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { 
result.importRagFilesConfig_ = importRagFilesConfigBuilder_ == null ? importRagFilesConfig_ : importRagFilesConfigBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasImportRagFilesConfig()) { mergeImportRagFilesConfig(other.getImportRagFilesConfig()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getImportRagFilesConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the RagCorpus resource into which to import files. * Format: * `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig importRagFilesConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.Builder, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfigOrBuilder> importRagFilesConfigBuilder_; /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the importRagFilesConfig field is set. */ public boolean hasImportRagFilesConfig() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The importRagFilesConfig. */ public com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig getImportRagFilesConfig() { if (importRagFilesConfigBuilder_ == null) { return importRagFilesConfig_ == null ? com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.getDefaultInstance() : importRagFilesConfig_; } else { return importRagFilesConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. 
* [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setImportRagFilesConfig( com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig value) { if (importRagFilesConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } importRagFilesConfig_ = value; } else { importRagFilesConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setImportRagFilesConfig( com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.Builder builderForValue) { if (importRagFilesConfigBuilder_ == null) { importRagFilesConfig_ = builderForValue.build(); } else { importRagFilesConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeImportRagFilesConfig( com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig value) { if (importRagFilesConfigBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && importRagFilesConfig_ != null && importRagFilesConfig_ != com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.getDefaultInstance()) { getImportRagFilesConfigBuilder().mergeFrom(value); } else { importRagFilesConfig_ = value; } } else { importRagFilesConfigBuilder_.mergeFrom(value); } if (importRagFilesConfig_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearImportRagFilesConfig() { bitField0_ = (bitField0_ & ~0x00000002); importRagFilesConfig_ = null; if (importRagFilesConfigBuilder_ != null) { importRagFilesConfigBuilder_.dispose(); importRagFilesConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.Builder getImportRagFilesConfigBuilder() { bitField0_ |= 0x00000002; onChanged(); return getImportRagFilesConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfigOrBuilder getImportRagFilesConfigOrBuilder() { if (importRagFilesConfigBuilder_ != null) { return importRagFilesConfigBuilder_.getMessageOrBuilder(); } else { return importRagFilesConfig_ == null ? com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.getDefaultInstance() : importRagFilesConfig_; } } /** * * * <pre> * Required. The config for the RagFiles to be synced and imported into the * RagCorpus. * [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles]. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ImportRagFilesConfig import_rag_files_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.Builder, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfigOrBuilder> getImportRagFilesConfigFieldBuilder() { if (importRagFilesConfigBuilder_ == null) { importRagFilesConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfig.Builder, com.google.cloud.aiplatform.v1beta1.ImportRagFilesConfigOrBuilder>( getImportRagFilesConfig(), getParentForChildren(), isClean()); importRagFilesConfig_ = null; } return importRagFilesConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } 
@java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ImportRagFilesRequest) private static final com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest(); } public static com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportRagFilesRequest> PARSER = new com.google.protobuf.AbstractParser<ImportRagFilesRequest>() { @java.lang.Override public ImportRagFilesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportRagFilesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportRagFilesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ImportRagFilesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,021
java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/SslConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1; /** * * * <pre> * SSL configuration. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.SslConfig} */ public final class SslConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.SslConfig) SslConfigOrBuilder { private static final long serialVersionUID = 0L; // Use SslConfig.newBuilder() to construct. 
private SslConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SslConfig() { sslMode_ = 0; caSource_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SslConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ResourcesProto .internal_static_google_cloud_alloydb_v1_SslConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ResourcesProto .internal_static_google_cloud_alloydb_v1_SslConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.SslConfig.class, com.google.cloud.alloydb.v1.SslConfig.Builder.class); } /** * * * <pre> * SSL mode options. * </pre> * * Protobuf enum {@code google.cloud.alloydb.v1.SslConfig.SslMode} */ public enum SslMode implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * SSL mode is not specified. Defaults to ENCRYPTED_ONLY. * </pre> * * <code>SSL_MODE_UNSPECIFIED = 0;</code> */ SSL_MODE_UNSPECIFIED(0), /** * * * <pre> * SSL connections are optional. CA verification not enforced. * </pre> * * <code>SSL_MODE_ALLOW = 1 [deprecated = true];</code> */ @java.lang.Deprecated SSL_MODE_ALLOW(1), /** * * * <pre> * SSL connections are required. CA verification not enforced. * Clients may use locally self-signed certificates (default psql client * behavior). * </pre> * * <code>SSL_MODE_REQUIRE = 2 [deprecated = true];</code> */ @java.lang.Deprecated SSL_MODE_REQUIRE(2), /** * * * <pre> * SSL connections are required. CA verification enforced. * Clients must have certificates signed by a Cluster CA, for example, using * GenerateClientCertificate. 
* </pre> * * <code>SSL_MODE_VERIFY_CA = 3 [deprecated = true];</code> */ @java.lang.Deprecated SSL_MODE_VERIFY_CA(3), /** * * * <pre> * SSL connections are optional. CA verification not enforced. * </pre> * * <code>ALLOW_UNENCRYPTED_AND_ENCRYPTED = 4;</code> */ ALLOW_UNENCRYPTED_AND_ENCRYPTED(4), /** * * * <pre> * SSL connections are required. CA verification not enforced. * </pre> * * <code>ENCRYPTED_ONLY = 5;</code> */ ENCRYPTED_ONLY(5), UNRECOGNIZED(-1), ; /** * * * <pre> * SSL mode is not specified. Defaults to ENCRYPTED_ONLY. * </pre> * * <code>SSL_MODE_UNSPECIFIED = 0;</code> */ public static final int SSL_MODE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * SSL connections are optional. CA verification not enforced. * </pre> * * <code>SSL_MODE_ALLOW = 1 [deprecated = true];</code> */ @java.lang.Deprecated public static final int SSL_MODE_ALLOW_VALUE = 1; /** * * * <pre> * SSL connections are required. CA verification not enforced. * Clients may use locally self-signed certificates (default psql client * behavior). * </pre> * * <code>SSL_MODE_REQUIRE = 2 [deprecated = true];</code> */ @java.lang.Deprecated public static final int SSL_MODE_REQUIRE_VALUE = 2; /** * * * <pre> * SSL connections are required. CA verification enforced. * Clients must have certificates signed by a Cluster CA, for example, using * GenerateClientCertificate. * </pre> * * <code>SSL_MODE_VERIFY_CA = 3 [deprecated = true];</code> */ @java.lang.Deprecated public static final int SSL_MODE_VERIFY_CA_VALUE = 3; /** * * * <pre> * SSL connections are optional. CA verification not enforced. * </pre> * * <code>ALLOW_UNENCRYPTED_AND_ENCRYPTED = 4;</code> */ public static final int ALLOW_UNENCRYPTED_AND_ENCRYPTED_VALUE = 4; /** * * * <pre> * SSL connections are required. CA verification not enforced. 
* </pre> * * <code>ENCRYPTED_ONLY = 5;</code> */ public static final int ENCRYPTED_ONLY_VALUE = 5; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static SslMode valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static SslMode forNumber(int value) { switch (value) { case 0: return SSL_MODE_UNSPECIFIED; case 1: return SSL_MODE_ALLOW; case 2: return SSL_MODE_REQUIRE; case 3: return SSL_MODE_VERIFY_CA; case 4: return ALLOW_UNENCRYPTED_AND_ENCRYPTED; case 5: return ENCRYPTED_ONLY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<SslMode> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<SslMode> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<SslMode>() { public SslMode findValueByNumber(int number) { return SslMode.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.alloydb.v1.SslConfig.getDescriptor().getEnumTypes().get(0); } private static final SslMode[] VALUES = values(); public static SslMode 
valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private SslMode(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1.SslConfig.SslMode) } /** * * * <pre> * Certificate Authority (CA) source for SSL/TLS certificates. * </pre> * * Protobuf enum {@code google.cloud.alloydb.v1.SslConfig.CaSource} */ public enum CaSource implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Certificate Authority (CA) source not specified. Defaults to * CA_SOURCE_MANAGED. * </pre> * * <code>CA_SOURCE_UNSPECIFIED = 0;</code> */ CA_SOURCE_UNSPECIFIED(0), /** * * * <pre> * Certificate Authority (CA) managed by the AlloyDB Cluster. * </pre> * * <code>CA_SOURCE_MANAGED = 1;</code> */ CA_SOURCE_MANAGED(1), UNRECOGNIZED(-1), ; /** * * * <pre> * Certificate Authority (CA) source not specified. Defaults to * CA_SOURCE_MANAGED. * </pre> * * <code>CA_SOURCE_UNSPECIFIED = 0;</code> */ public static final int CA_SOURCE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Certificate Authority (CA) managed by the AlloyDB Cluster. * </pre> * * <code>CA_SOURCE_MANAGED = 1;</code> */ public static final int CA_SOURCE_MANAGED_VALUE = 1; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static CaSource valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. */ public static CaSource forNumber(int value) { switch (value) { case 0: return CA_SOURCE_UNSPECIFIED; case 1: return CA_SOURCE_MANAGED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<CaSource> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<CaSource> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<CaSource>() { public CaSource findValueByNumber(int number) { return CaSource.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.alloydb.v1.SslConfig.getDescriptor().getEnumTypes().get(1); } private static final CaSource[] VALUES = values(); public static CaSource valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private CaSource(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1.SslConfig.CaSource) } public static final int SSL_MODE_FIELD_NUMBER = 1; private int sslMode_ = 0; /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. 
* </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for sslMode. */ @java.lang.Override public int getSslModeValue() { return sslMode_; } /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The sslMode. */ @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig.SslMode getSslMode() { com.google.cloud.alloydb.v1.SslConfig.SslMode result = com.google.cloud.alloydb.v1.SslConfig.SslMode.forNumber(sslMode_); return result == null ? com.google.cloud.alloydb.v1.SslConfig.SslMode.UNRECOGNIZED : result; } public static final int CA_SOURCE_FIELD_NUMBER = 2; private int caSource_ = 0; /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for caSource. */ @java.lang.Override public int getCaSourceValue() { return caSource_; } /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The caSource. */ @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig.CaSource getCaSource() { com.google.cloud.alloydb.v1.SslConfig.CaSource result = com.google.cloud.alloydb.v1.SslConfig.CaSource.forNumber(caSource_); return result == null ? 
com.google.cloud.alloydb.v1.SslConfig.CaSource.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sslMode_ != com.google.cloud.alloydb.v1.SslConfig.SslMode.SSL_MODE_UNSPECIFIED.getNumber()) { output.writeEnum(1, sslMode_); } if (caSource_ != com.google.cloud.alloydb.v1.SslConfig.CaSource.CA_SOURCE_UNSPECIFIED.getNumber()) { output.writeEnum(2, caSource_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sslMode_ != com.google.cloud.alloydb.v1.SslConfig.SslMode.SSL_MODE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, sslMode_); } if (caSource_ != com.google.cloud.alloydb.v1.SslConfig.CaSource.CA_SOURCE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, caSource_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1.SslConfig)) { return super.equals(obj); } com.google.cloud.alloydb.v1.SslConfig other = (com.google.cloud.alloydb.v1.SslConfig) obj; if (sslMode_ != other.sslMode_) return false; if (caSource_ != other.caSource_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SSL_MODE_FIELD_NUMBER; hash = (53 * 
hash) + sslMode_; hash = (37 * hash) + CA_SOURCE_FIELD_NUMBER; hash = (53 * hash) + caSource_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1.SslConfig parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
public static com.google.cloud.alloydb.v1.SslConfig parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SslConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.SslConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.alloydb.v1.SslConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * SSL configuration. 
* </pre> * * Protobuf type {@code google.cloud.alloydb.v1.SslConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.SslConfig) com.google.cloud.alloydb.v1.SslConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ResourcesProto .internal_static_google_cloud_alloydb_v1_SslConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ResourcesProto .internal_static_google_cloud_alloydb_v1_SslConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.SslConfig.class, com.google.cloud.alloydb.v1.SslConfig.Builder.class); } // Construct using com.google.cloud.alloydb.v1.SslConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; sslMode_ = 0; caSource_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1.ResourcesProto .internal_static_google_cloud_alloydb_v1_SslConfig_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig getDefaultInstanceForType() { return com.google.cloud.alloydb.v1.SslConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig build() { com.google.cloud.alloydb.v1.SslConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig buildPartial() { com.google.cloud.alloydb.v1.SslConfig result = new com.google.cloud.alloydb.v1.SslConfig(this); if (bitField0_ != 
0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1.SslConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.sslMode_ = sslMode_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.caSource_ = caSource_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1.SslConfig) { return mergeFrom((com.google.cloud.alloydb.v1.SslConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1.SslConfig other) { if (other == com.google.cloud.alloydb.v1.SslConfig.getDefaultInstance()) return this; if (other.sslMode_ != 0) { setSslModeValue(other.getSslModeValue()); } if (other.caSource_ != 0) { setCaSourceValue(other.getCaSourceValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { sslMode_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { caSource_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int sslMode_ = 0; /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for sslMode. */ @java.lang.Override public int getSslModeValue() { return sslMode_; } /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for sslMode to set. * @return This builder for chaining. */ public Builder setSslModeValue(int value) { sslMode_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The sslMode. 
*/ @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig.SslMode getSslMode() { com.google.cloud.alloydb.v1.SslConfig.SslMode result = com.google.cloud.alloydb.v1.SslConfig.SslMode.forNumber(sslMode_); return result == null ? com.google.cloud.alloydb.v1.SslConfig.SslMode.UNRECOGNIZED : result; } /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The sslMode to set. * @return This builder for chaining. */ public Builder setSslMode(com.google.cloud.alloydb.v1.SslConfig.SslMode value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; sslMode_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. SSL mode. Specifies client-server SSL/TLS connection behavior. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.SslMode ssl_mode = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearSslMode() { bitField0_ = (bitField0_ & ~0x00000001); sslMode_ = 0; onChanged(); return this; } private int caSource_ = 0; /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for caSource. */ @java.lang.Override public int getCaSourceValue() { return caSource_; } /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for caSource to set. 
* @return This builder for chaining. */ public Builder setCaSourceValue(int value) { caSource_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The caSource. */ @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig.CaSource getCaSource() { com.google.cloud.alloydb.v1.SslConfig.CaSource result = com.google.cloud.alloydb.v1.SslConfig.CaSource.forNumber(caSource_); return result == null ? com.google.cloud.alloydb.v1.SslConfig.CaSource.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The caSource to set. * @return This builder for chaining. */ public Builder setCaSource(com.google.cloud.alloydb.v1.SslConfig.CaSource value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; caSource_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Certificate Authority (CA) source. Only CA_SOURCE_MANAGED is * supported currently, and is the default value. * </pre> * * <code> * .google.cloud.alloydb.v1.SslConfig.CaSource ca_source = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. 
*/ public Builder clearCaSource() { bitField0_ = (bitField0_ & ~0x00000002); caSource_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.SslConfig) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.SslConfig) private static final com.google.cloud.alloydb.v1.SslConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.SslConfig(); } public static com.google.cloud.alloydb.v1.SslConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SslConfig> PARSER = new com.google.protobuf.AbstractParser<SslConfig>() { @java.lang.Override public SslConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SslConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SslConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1.SslConfig 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,080
java-telcoautomation/proto-google-cloud-telcoautomation-v1alpha1/src/main/java/com/google/cloud/telcoautomation/v1alpha1/UpdateDeploymentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/telcoautomation/v1alpha1/telcoautomation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.telcoautomation.v1alpha1; /** * * * <pre> * Request object for `UpdateDeployment`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest} */ public final class UpdateDeploymentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) UpdateDeploymentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateDeploymentRequest.newBuilder() to construct. 
private UpdateDeploymentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateDeploymentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateDeploymentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1alpha1_UpdateDeploymentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1alpha1_UpdateDeploymentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.class, com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.Builder.class); } private int bitField0_; public static final int DEPLOYMENT_FIELD_NUMBER = 1; private com.google.cloud.telcoautomation.v1alpha1.Deployment deployment_; /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the deployment field is set. */ @java.lang.Override public boolean hasDeployment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The deployment. */ @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.Deployment getDeployment() { return deployment_ == null ? 
com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance() : deployment_; } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder getDeploymentOrBuilder() { return deployment_ == null ? com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance() : deployment_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getDeployment()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDeployment()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest)) { return super.equals(obj); } com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest other = (com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) obj; if (hasDeployment() != other.hasDeployment()) return false; if (hasDeployment()) { if (!getDeployment().equals(other.getDeployment())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) 
+ getDescriptor().hashCode(); if (hasDeployment()) { hash = (37 * hash) + DEPLOYMENT_FIELD_NUMBER; hash = (53 * hash) + getDeployment().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request object for `UpdateDeployment`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1alpha1_UpdateDeploymentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1alpha1_UpdateDeploymentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.class, com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.Builder.class); } // Construct using // com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDeploymentFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; deployment_ = null; if (deploymentBuilder_ != null) 
{ deploymentBuilder_.dispose(); deploymentBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1alpha1_UpdateDeploymentRequest_descriptor; } @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest getDefaultInstanceForType() { return com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest build() { com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest buildPartial() { com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest result = new com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.deployment_ = deploymentBuilder_ == null ? deployment_ : deploymentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) { return mergeFrom((com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest other) { if (other == com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest.getDefaultInstance()) return this; if (other.hasDeployment()) { mergeDeployment(other.getDeployment()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getDeploymentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.telcoautomation.v1alpha1.Deployment deployment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.telcoautomation.v1alpha1.Deployment, com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder, com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder> deploymentBuilder_; /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the deployment field is set. */ public boolean hasDeployment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The deployment. */ public com.google.cloud.telcoautomation.v1alpha1.Deployment getDeployment() { if (deploymentBuilder_ == null) { return deployment_ == null ? 
com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance() : deployment_; } else { return deploymentBuilder_.getMessage(); } } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDeployment(com.google.cloud.telcoautomation.v1alpha1.Deployment value) { if (deploymentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } deployment_ = value; } else { deploymentBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDeployment( com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder builderForValue) { if (deploymentBuilder_ == null) { deployment_ = builderForValue.build(); } else { deploymentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDeployment(com.google.cloud.telcoautomation.v1alpha1.Deployment value) { if (deploymentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && deployment_ != null && deployment_ != com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance()) { getDeploymentBuilder().mergeFrom(value); } else { deployment_ = value; } } else { deploymentBuilder_.mergeFrom(value); } if (deployment_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The `deployment` to update. 
* </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDeployment() { bitField0_ = (bitField0_ & ~0x00000001); deployment_ = null; if (deploymentBuilder_ != null) { deploymentBuilder_.dispose(); deploymentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder getDeploymentBuilder() { bitField0_ |= 0x00000001; onChanged(); return getDeploymentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The `deployment` to update. * </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder getDeploymentOrBuilder() { if (deploymentBuilder_ != null) { return deploymentBuilder_.getMessageOrBuilder(); } else { return deployment_ == null ? com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance() : deployment_; } } /** * * * <pre> * Required. The `deployment` to update. 
* </pre> * * <code> * .google.cloud.telcoautomation.v1alpha1.Deployment deployment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.telcoautomation.v1alpha1.Deployment, com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder, com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder> getDeploymentFieldBuilder() { if (deploymentBuilder_ == null) { deploymentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.telcoautomation.v1alpha1.Deployment, com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder, com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>( getDeployment(), getParentForChildren(), isClean()); deployment_ = null; } return deploymentBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. 
Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Update mask is used to specify the fields to be overwritten in * the `deployment` resource by the update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest) private static final com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest(); } public static com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateDeploymentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateDeploymentRequest>() { @java.lang.Override public UpdateDeploymentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateDeploymentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateDeploymentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.telcoautomation.v1alpha1.UpdateDeploymentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }